Slow ElasticSearch Serilog - elasticsearch

My C# .NET Core web application uses Serilog with Elasticsearch, configured like the code below. Whenever the Elasticsearch server has a problem, my server's HTTP queries slow down to being unusable — if Elasticsearch is down, each query takes 15 seconds or so. I assume each query tries to log the message and waits for the Elasticsearch server to respond. I understand that I can play with the Elasticsearch timeout, but that is not a solution. Is there a better way to keep the logging method from limiting the application's performance?
"Serilog": {
"IncludeScopes": true,
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "....\\log.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:o} [{Level:u3}] {Message}{NewLine}{Exception}"
}
},
{
"Name": "Elasticsearch",
"Args": {
"nodeUris": "http://localhost:9200",
"typeName": "SomeApp",
"batchPostingLimit": 50,
"restrictedToMinimumLevel": "Information",
"bufferFileSizeLimitBytes": 5242880,
"bufferLogShippingInterval": 5000,
"bufferRetainedInvalidPayloadsLimitBytes": 5000,
"bufferFileCountLimit": 31,
"connectionTimeout": 5,
"queueSizeLimit": "100000",
"autoRegisterTemplate": true,
"overwriteTemplate": false
}
}
],
"Enrich": [
"FromLogContext"
],
"Properties": {
"Application": "SomeApp",
"Environment": "SomeApp.Production"
}
}
The middleware part looks like this
// Middleware entry point: times the downstream pipeline and always logs the
// request, even when the downstream handler throws.
public async Task Invoke(HttpContext context, ILogger logger, ILogContext logContext)
{
    // Allow the request body to be re-read later (e.g. by LogRequest).
    context.Request.EnableBuffering();
    var timer = Stopwatch.StartNew();
    try
    {
        await _next.Invoke(context);
    }
    finally
    {
        timer.Stop();
        // Logging sits in the finally block so failed requests are recorded too.
        await LogRequest(context, logger, timer.ElapsedMilliseconds, logContext);
    }
}
// Writes a single request-summary event through Serilog.
// NOTE(review): `message` is not defined anywhere in this snippet, and
// `context`, `elapsedMs`, and `logContext` are unused here — presumably the
// elided code builds `message` from them; confirm against the full source.
private async Task LogRequest(HttpContext context, ILogger logger, long elapsedMs, ILogContext logContext){
logger.Information(LogConstants.RequestMessageTemplate, message);
}

Related

Are transactions in a batch serialized?

If I send a transaction with 2 contract fn calls, does the 2nd get executed if the 1st fails?
sample code:
const actions = [
TX.functionCall("select_staking_pool", { staking_pool_account_id: stakingPool }, this.BASE_GAS.muln(3), new BN(0)),
TX.functionCall("deposit_and_stake", { amount: near.ntoy(amountNear) }, this.BASE_GAS.muln(5), new BN(0))
]
return near.broadcast_tx_commit_actions(actions, sender, lockupContract, privateKey)
I did execute this batch, but the result gives me no clear answer: https://explorer.testnet.near.org/transactions/DcchgGdzAVEvNqRReHkjsmrTr5PoaMoibs3fyKoKBYSe
The 1st fn call code uses cross-contract calls and a callback code to check if the contract is whitelisted, but I can't see the callback's log code in the receipt logs. I assume the 1st call fails because the second fails with the error: "staking pool is not selected"
Here's the full result:
{
"status": {
"Failure": {
"ActionError": {
"index": 1,
"kind": {
"FunctionCallError": {
"HostError": {
"GuestPanic": {
"panic_msg": "panicked at 'Staking pool is not selected', src/internal.rs:90:9"
}
}
}
}
}
}
},
"transaction": {
"signer_id": "lucio.testnet",
"public_key": "ed25519:Cvqie7SJ6xmLNA5KoTAYoUAkhD25KaJLG6N9oSmzT9FK",
"nonce": 33,
"receiver_id": "274e981786efcabbe87794f20348c1b2af6e7963.lockupy.testnet",
"actions": [
{
"FunctionCall": {
"method_name": "select_staking_pool",
"args": "eyJzdGFraW5nX3Bvb2xfYWNjb3VudF9pZCI6Im5vcnRoZXJubGlnaHRzLnN0YWtpbmdwb29sIn0=",
"gas": 75000000000000,
"deposit": "0"
}
},
{
"FunctionCall": {
"method_name": "deposit_and_stake",
"args": "eyJhbW91bnQiOiIxMDk0OTkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwIn0=",
"gas": 125000000000000,
"deposit": "0"
}
}
],
"signature": "ed25519:gzCsrcobVtjuD6FE4qDhgo9zdk2feBp6dfa66NZYLPTnwypF7g8mDM7yubcnkVcF1sPzEFeEfJtQ4hwwnhrxTa6",
"hash": "EjiQBjzF6Ea74wSHkjmZMkhTGwgYxbUdeVKUvzaA8X7k"
},
"transaction_outcome": {
"proof": [],
"block_hash": "3xEuw43E1W4FP2q1Un6XLsDSbhRc7c7FMtGWWL6i3KJj",
"id": "EjiQBjzF6Ea74wSHkjmZMkhTGwgYxbUdeVKUvzaA8X7k",
"outcome": {
"logs": [],
"receipt_ids": [
"75pJCVuheYCjX82RspQzoKm5v7NSG1QNUaPK7DRzZYNt"
],
"gas_burnt": 4748079879222,
"tokens_burnt": "474807987922200000000",
"executor_id": "lucio.testnet",
"status": {
"SuccessReceiptId": "75pJCVuheYCjX82RspQzoKm5v7NSG1QNUaPK7DRzZYNt"
}
}
},
"receipts_outcome": [
{
"proof": [
{
"hash": "55YXLhqDc63f8n4MdZNwi6TdmtFkaYp18wKwm7qFmDo5",
"direction": "Left"
}
],
"block_hash": "DB3p49tiSBfRLv5diDUStajtDiMNs2KroKfRcr2cQDnV",
"id": "75pJCVuheYCjX82RspQzoKm5v7NSG1QNUaPK7DRzZYNt",
"outcome": {
"logs": [
"Selecting staking pool #northernlights.stakingpool. Going to check whitelist first."
],
"receipt_ids": [
"HXStMQuf5aWKVpWDM1Hf4EdA3S4FLTGjmYaF2WAYRcUz"
],
"gas_burnt": 20291176638173,
"tokens_burnt": "2029117663817300000000",
"executor_id": "274e981786efcabbe87794f20348c1b2af6e7963.lockupy.testnet",
"status": {
"Failure": {
"ActionError": {
"index": 1,
"kind": {
"FunctionCallError": {
"HostError": {
"GuestPanic": {
"panic_msg": "panicked at 'Staking pool is not selected', src/internal.rs:90:9"
}
}
}
}
}
}
}
}
},
{
"proof": [],
"block_hash": "2kua8s7vjix2MiMHZ7Rzbrhyv8UbGFEpvWeT67H2qCYM",
"id": "HXStMQuf5aWKVpWDM1Hf4EdA3S4FLTGjmYaF2WAYRcUz",
"outcome": {
"logs": [],
"receipt_ids": [],
"gas_burnt": 0,
"tokens_burnt": "0",
"executor_id": "lucio.testnet",
"status": {
"SuccessValue": ""
}
}
}
]
}
Edit: PS. It looks like the 1st function call was failing silently (regarding Tx result). I tested the same calls as 2 separate transactions and the 1st one (select_staking_pool) succeeded.
All later actions after the first failed action are not executed. And their execution fees are refunded. All the changes that were successfully executed before, will be reverted and all the promises will not be scheduled and executed either.
Your case is more complicated, because the first action succeeds by returning a promise. The resulting promise will later fail in the callback, but the second action fails immediately because the staking pool is not yet selected due to async execution. So the first promise doesn't get scheduled.
EDIT 1.
Once a transaction or a receipt succeeds (finishes all actions) it doesn't rollback anything. So if any future promises fail they are going to be independently executing from each other. Also only the last action in a batch of actions returns the result for the entire receipt.

Serilog does not write files after installing the service with sc.exe

I have developed a net core 3.1 service for windows, the service works fine, but fails to write the log file.
during debugging Serilog writes the file correctly, but once installed with sc it writes nothing.
Program.cs
/// <summary>Host bootstrapper for the Windows-service worker.</summary>
public class Program
{
    public static void Main(string[] args) => CreateHostBuilder(args).Build().Run();

    /// <summary>
    /// Builds the generic host: runs as a Windows service, reads the Serilog
    /// pipeline from configuration, and registers the worker plus its options.
    /// </summary>
    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .UseWindowsService()
            .UseSerilog((ctx, loggerCfg) =>
                loggerCfg.ReadFrom.Configuration(ctx.Configuration)) // custom log event
            .ConfigureServices((ctx, services) =>
            {
                // Grab the configuration and bind the ServiceInfo section.
                IConfiguration config = ctx.Configuration;
                ServiceInfo info = config.GetSection("ServiceInfo").Get<ServiceInfo>();
                services.AddSingleton(info);
                services.AddHostedService<Worker>();
            });
}
appsettings.json
"Serilog": {
"Using": [ "Serilog.Sinks.Console", "Serilog.Sinks.RollingFile" ],
"MinimumLevel": {
"Default": "Debug",
"Override": {
"Default": "Information",
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Information"
}
},
"WriteTo": [
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByIncludingOnly",
"Args": {
"expression": "(#Level = 'Error' or #Level = 'Fatal' or #Level = 'Warning')"
}
}
],
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "Logs/ex_.log",
"outputTemplate": "{Timestamp} [{Level:u3}] {Message}{NewLine}{Exception}",
//"outputTemplate": "{Timestamp:o} [{Level:u3}] ({SourceContext}) {Message}{NewLine}{Exception}",
"rollingInterval": "Day",
"retainedFileCountLimit": 7
}
}
]
}
}
}
],
"Enrich": [
"FromLogContext",
"WithMachineName"
],
"Properties": {
"Application": "ORAMS-II Service Status Telegram"
}
}
}
I don't know what the problem could be; installed on a Linux machine, it writes the file correctly.

Serilog best approach for outputting to file and Elasticsearch

I used to ship my data to Elasticsearch by FileBeat-LogStash pipeline. Processed my logs which was created via log4net, mutated them, and sent required fields towards elastic.
Now I would like to replace my logic by removing the FileBeat and Logstash and make use of Serilog and it's elasticsearch sink.
To broader the picture I have an API endpoint which receives requests which I need to log to a textual file as they are so I need a File sink. Further down the code, my business logic will make use of data received and among else create an object which I then need to ingest to an index at elastic.
What's the best approach for this, have one Serilog instance and use some kind of filtering or have two Serilog instances? I'm closer to decorating (enrich) my cases and then using sinks by filtering (one Serilog instance) but because I'm a novice with Serilog I don't know how to set up the whole thing.
The abbreviated code would be something like this,
My controller class:
/// <summary>Receives sample requests, logs them, and hands them to the business layer.</summary>
public class RequestController : ControllerBase
{
    private readonly BLService _service = new BLService(Log.Logger);

    [Route("Test")]
    [HttpPost]
    public IActionResult Test([FromBody]SampleRequest request)
    {
        // Correlation id attached to the request log event via the LogContext.
        var logId = Guid.NewGuid().ToString();
        using (LogContext.PushProperty("LogId", logId))
        {
            // FIX: '@' (not '#') is Serilog's destructuring operator; '#' is not
            // a valid operator, so the request would not be captured as a
            // structured object.
            Log.Information("{@request}", request);
        }

        var tran = new SampleTran
        {
            SampleTranType = "Test",
            SampleTranId = request.Id,
            EventTime = DateTime.Now
        };
        _service.ProcessTransaction(tran);
        return new OkResult();
    }
}
And my service where I'm adding property "Type" with constant value "ElkData" which I could then filter on:
/// <summary>
/// Business-layer service. Every event written through its logger carries
/// Type = "ElkData", which the sink filters use to route it to Elasticsearch.
/// </summary>
public class BLService
{
    private readonly ILogger _log;

    public BLService(ILogger logger)
    {
        // Tag all events from this service so the Elasticsearch sub-logger's
        // ByIncludingOnly filter picks them up (and the file sink excludes them).
        _log = logger.ForContext("Type", "ElkData");
    }

    public void ProcessTransaction(SampleTran transaction)
    {
        var elkData = DoSomeStuffAndReturnElkTransactionToStore(transaction);
        // FIX: '@' destructures the object into structured properties;
        // '#' is not a valid Serilog operator.
        _log.Information("{@ElkData}", elkData);
    }
}
One note, my text file should only contain raw requests (without elasticsearch data). So far I'm writing all to file, and my appsettings.json looks like this:
{
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "C:\\DEV\\Logs\\mylog-.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:yyyy-MM-ddTHH:mm:ss.fff zzz} [{Level:u3}] {Message:j}{NewLine}{Exception}"
}
}
],
"Enrich": [ "FromLogContext" ]
},
"AllowedHosts": "*"
}
I need to add the elastic part using filtering, am I right? Any help would be appreciated.
Here's how I managed to do what I need:
I've used ForContext to enrich my log items. So in the controller, I used:
var requestLog = Log.ForContext("Type", "Request");
requestLog.Information("Request: {#request}", request);//this needs to go to the log file
the code in BLservice stays the same and the filtering is described in the appsettings.json as:
{
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByExcluding",
"Args": {
"expression": "Type = 'ElkData'"
}
}
],
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "C:\\DEV\\Logs\\mylog-.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:yyyy-MM-ddTHH:mm:ss.fff zzz} [{Level:u3}] {Message:j}{NewLine}{Exception}",
"shared": true
}
}
]
}
}
},
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByIncludingOnly",
"Args": {
"expression": "Type = 'ElkData'"
}
}
],
"WriteTo": [
{
"Name": "Elasticsearch",
"Args": {
"nodeUris": "<your elastic url>",
"TypeName": "_doc",
"IndexFormat": "serilog_data",
"InlineFields": true,
"BufferBaseFilename": "C:\\DEV\\Logs\\elk_buffer"
}
}
]
}
}
}
]
}
}
So the file will contain everything that is logged out except logs that carry "Type = 'ElkData'" enrichment, those will end up in elasticsearch index.
Hope this simple approach will help some serilog novice out there someday

Creating a custom adapter in Botframework for Actions on Google

I'm currently writing a custom adapter in Typescript to connect Google Assistant to Microsoft's Botframework. In this adapter I'm attempting to capture the Google Assistant conversation object through a webook call and change it using my bot.
At this moment the only thing that my bot is doing is receive the request from Actions on Google and parsing the request body into an ActionsOnGoogleConversation object. After this I call conv.ask() to try a simple conversation between the two services.
Api Endpoint:
// HTTP endpoint for Actions on Google: each webhook call is handed to the
// custom adapter, which builds a TurnContext and runs the bot on it.
app.post("/api/google", (req, res) => {
googleAdapter.processActivity(req, res, async (context) => {
await bot.run(context);
});
});
Adapter processActivity function:
// First-attempt adapter entry point: copies the raw webhook body onto an
// ActionsSdkConversation and immediately answers with conv.ask("Boo").
// NOTE(review): `logic` is never invoked, so the bot pipeline never runs.
// This is the version that produced the UnparseableJsonResponse error —
// presumably because the serialized conv object (request, headers,
// availableSurfaces, ...) is not a valid AoG webhook response body; confirm.
public async processActivity(req: WebRequest, res: WebResponse, logic: (context: TurnContext) => Promise<void>): Promise<void> {
const body = req.body;
let conv = new ActionsSdkConversation();
Object.assign(conv, body);
res.status(200);
res.send(conv.ask("Boo"));
};
When I try to start the conversation I get the following error in the Action on Google console.
UnparseableJsonResponse
API Version 2: Failed to parse JSON response string with
'INVALID_ARGUMENT' error: "availableSurfaces: Cannot find field." HTTP
Status Code: 200.
I've already checked the response and I can find a field called availableSurfaces in the AoG console and when I call my bot using Postman.
Response:
{
"responses": [
"Boo"
],
"expectUserResponse": true,
"digested": false,
"noInputs": [],
"speechBiasing": [],
"_responded": true,
"_ordersv3": false,
"request": {},
"headers": {},
"_init": {},
"sandbox": false,
"input": {},
"surface": {
"capabilities": [
{
"name": "actions.capability.MEDIA_RESPONSE_AUDIO"
},
{
"name": "actions.capability.AUDIO_OUTPUT"
},
{
"name": "actions.capability.ACCOUNT_LINKING"
},
{
"name": "actions.capability.SCREEN_OUTPUT"
}
]
},
"available": {
"surfaces": {
"list": [],
"capabilities": {
"surfaces": []
}
}
},
"user": {
"locale": "en-US",
"lastSeen": "2019-11-14T12:40:52Z",
"userStorage": "{\"data\":{\"userId\":\"c1a4b8ab-06bb-4270-80f5-958cfdff57bd\"}}",
"userVerificationStatus": "VERIFIED"
},
"arguments": {
"parsed": {
"input": {},
"list": []
},
"status": {
"input": {},
"list": []
},
"raw": {
"list": [],
"input": {}
}
},
"device": {},
"screen": false,
"body": {},
"version": 2,
"action": "",
"intent": "",
"parameters": {},
"contexts": {
"input": {},
"output": {}
},
"incoming": {
"parsed": []
},
"query": "",
"data": {},
"conversation": {
"conversationId": "ABwppHEky66Iy1-qJ_4g08i3Z1HNHe2aDTrVTqY4otnNmdOgY2CC0VDbyt9lIM-_WkJA8emxbMPVxS5uutYHW2BzRQ",
"type": "NEW"
},
"inputs": [
{
"intent": "actions.intent.MAIN",
"rawInputs": [
{
"inputType": "VOICE",
"query": "Talk to My test app"
}
]
}
],
"availableSurfaces": [
{
"capabilities": [
{
"name": "actions.capability.AUDIO_OUTPUT"
},
{
"name": "actions.capability.SCREEN_OUTPUT"
},
{
"name": "actions.capability.WEB_BROWSER"
}
]
}
]
}
Does anyone know what might be causing this? I personally feel that creating the ActionsSdkConversation could be the cause, but I've not found any examples of using Google Assistant without getting the conv object from the standard intent handling setup.
So I managed to fix it by changing the approach, instead of having an API point that fits the structure of bot framework I changed it to the intenthandler of AoG.
Google controller
/** Bridges Actions-on-Google intents to a Bot Framework bot via a custom adapter. */
export class GoogleController {
    public endpoint: GoogleEndpoint;
    private adapter: GoogleAssistantAdapter;
    private bot: SampleBot;

    constructor(bot: SampleBot) {
        this.bot = bot;
        this.adapter = new GoogleAssistantAdapter();
        this.endpoint = actionssdk();
        this.setupIntents(this.endpoint);
    };

    /** Registers the AoG intents that get forwarded into the bot pipeline. */
    private setupIntents(endpoint: GoogleEndpoint) {
        // Both intents share the same forwarding behavior.
        const forward = (conv: ActionsSdkConversation) => this.sendMessageToBotFramework(conv);
        endpoint.intent(GoogleIntentTypes.Start, forward);
        endpoint.intent(GoogleIntentTypes.Text, forward);
    };

    /** Hands the conversation to the adapter, which runs the bot's turn logic. */
    private sendMessageToBotFramework(conv: ActionsSdkConversation) {
        this.adapter.processActivity(conv, async (context) => {
            await this.bot.run(context);
        });
    };
};
interface GoogleEndpoint extends OmniHandler, BaseApp , ActionsSdkApp <{}, {}, ActionsSdkConversation<{}, {}>> {};
Once the conv object was in the adapter, I used the conv object to create an activity which the bot used to do its things and saved it in state using context.turnState()
Adapter ProcessActivity
/**
 * Translates an Actions-on-Google conversation into a Bot Framework turn,
 * runs the middleware/bot pipeline, and returns the (possibly mutated)
 * conversation stashed in turn state.
 */
public async processActivity(conv: ActionsSdkConversation, logic: (context: TurnContext) => Promise<void>): Promise<ActionsSdkConversation> {
    const activity = this.createActivityFromGoogleConversation(conv);
    const context = this.createContext(activity);
    // The raw conv object rides along in turn state so the bot's response can
    // be folded back into it before replying to Google.
    context.turnState.set("httpBody", conv);
    await this.runMiddleware(context, logic);
    return context.turnState.get("httpBody");
};
Bot
export class SampleBot extends ActivityHandler {
constructor() {
super();
this.onMessage(async (context, next) => {
await context.sendActivity(`You said: ${context.activity.text}`);
await next();
});
}
Once the bot send a response, I used the result to modify the conv object, save it and then return it in processActivity().
/**
 * Folds the bot's outgoing activity back into the AoG conversation held in
 * turn state, using a SimpleResponse when distinct spoken output is present.
 */
private createGoogleConversationFromActivity(activity: Partial<Activity>, context: TurnContext) {
    const conv = context.turnState.get("httpBody");
    if (activity.speak) {
        // Spoken output differs from the display text: send both.
        conv.ask(new SimpleResponse({
            text: activity.text,
            speech: activity.speak
        }));
    } else if (activity.text) {
        conv.ask(activity.text);
    } else {
        throw Error("Activity text cannot be undefined");
    }
    context.turnState.set("httpBody", conv);
    return;
};
That resulted into a simple conversation between Google Assistant and Bot Framework.

Swagger use a custom swagger.json file aspnet core

Pretty sure I am missing something clearly obvious but not seeing it.
How can I use my updated swagger.json file?
I took my boilerplate swagger/v1/swagger.json code and pasted it into the editor.swagger.io system. I then updated the descriptions etc, added examples to my models and then saved the contents as swagger.json.
Moved the file into the root of my api application, set the file to copy always.
// Registers the Swagger generator. NOTE(review): the document is named "V1"
// (capital V) here — the swagger UI endpoint URL must match this casing exactly.
public void ConfigureServices(IServiceCollection services)
{...
services.AddSwaggerGen(c => { c.SwaggerDoc("V1", new Info {Title = "Decrypto", Version = "0.0"}); });
}
// Wires the Swagger middleware and UI into the request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
...
app.UseSwagger();
//--the default works fine
// app.UseSwaggerUI(c => { c.SwaggerEndpoint("/swagger/V1/swagger.json", "Decrypto v1"); });
// NOTE(review): a relative endpoint like "swagger.json" is resolved against
// the swagger UI's own route, not the application root — presumably this is
// why the custom file is not picked up; confirm the resulting request URL.
app.UseSwaggerUI(c => { c.SwaggerEndpoint("swagger.json", "Decrypto v1"); });
app.UseMvc();
}
I have tried a few different variations but none seem to do the trick. I don't really want to rewrite the work in SwaggerDoc, as it seems dirty to me to put documentation in the runtime.
the custom swagger.json file I want to use looks like this:
{
"swagger": "2.0",
"info": {
"version": "0.0",
"title": "My Title"
},
"paths": {
"/api/Decryption": {
"post": {
"tags": [
"API for taking encrypted values and getting the decrypted values back"
],
"summary": "",
"description": "",
"operationId": "Post",
"consumes": [
"application/json-patch+json",
"application/json",
"text/json",
"application/*+json"
],
"produces": [
"text/plain",
"application/json",
"text/json"
],
"parameters": [
{
"name": "units",
"in": "body",
"required": true,
"schema": {
"uniqueItems": false,
"type": "array",
"items": {
"$ref": "#/definitions/EncryptedUnit"
}
}
}
],
"responses": {
"200": {
"description": "Success",
"schema": {
"uniqueItems": false,
"type": "array",
"items": {
"$ref": "#/definitions/DecryptedUnit"
}
}
}
}
}
}
},
"definitions": {
"EncryptedUnit": {
"type": "object",
"properties": {
"value": {
"type": "string",
"example": "7OjLFw=="
},
"initializeVector": {
"type": "string",
"example": "5YVg="
},
"cipherText": {
"type": "string",
"example": "596F5AA48A882"
}
}
},
"DecryptedUnit": {
"type": "object",
"properties": {
"encrypted": {
"type": "string",
"example": "7OjLV="
},
"decrypted": {
"type": "string",
"example": "555-55-5555"
}
}
}
}
}
you need to configure PhysicalFileProvider and put your swagger.json into wwwroot or anywhere accessible by PhysicalFileProvider. After that you can access it using IFileProvider
Reference: https://www.c-sharpcorner.com/article/file-providers-in-asp-net-core/
Edit: if you just add app.UseStaticFiles(); to your Startup, you can access wwwroot without hassle.
Reference
Completely Different Approach
you may also consider to serve your file using Controller/Action
// Serves the hand-edited swagger.json from MyStaticFiles as a JSON download.
public IActionResult GetSwaggerDoc()
{
    var path = Path.Combine(
        Directory.GetCurrentDirectory(), "MyStaticFiles", "swagger.json");
    return PhysicalFile(path, "application/json");
}
.NET Core 2.2 can serve a physical file as a URL resource like below.
But if you use a custom swagger.json, your API documentation is frozen unless you update the file every time the API changes.
// Exposes the hand-written swagger document through the static-file middleware.
public void Configure(IApplicationBuilder app, IHostingEnvironment env,
ILoggerFactory loggerFactory)
{
    ...
    // FIX: PhysicalFileProvider requires an existing DIRECTORY as its root
    // (it throws for a file path), and StaticFileOptions.RequestPath must
    // begin with '/'. The original passed the full file path and an unrooted
    // request path, so the document was never served. With this mapping,
    // swagger/v1/swagger.json on disk is served at /swagger/v1/swagger.json.
    app.UseStaticFiles(new StaticFileOptions
    {
        FileProvider = new PhysicalFileProvider(
            Path.Combine(Directory.GetCurrentDirectory(), "swagger/v1")),
        RequestPath = "/swagger/v1"
    });
}

Resources