I have developed a net core 3.1 service for windows, the service works fine, but fails to write the log file.
During debugging Serilog writes the file correctly, but once installed with `sc` it writes nothing.
Program.cs
// Entry point for the .NET Core 3.1 Windows-service worker host.
public class Program
{
    public static void Main(string[] args)
    {
        // When installed with `sc`, a Windows service starts with its current
        // directory set to %WINDIR%\System32, so Serilog's relative file path
        // ("Logs/ex_.log") resolves there and nothing appears next to the exe.
        // Pin the current directory to the application's base directory before
        // the host (and Serilog) is built. Harmless under the debugger and on
        // Linux, where the current directory is already the app directory.
        System.IO.Directory.SetCurrentDirectory(System.AppContext.BaseDirectory);

        CreateHostBuilder(args).Build().Run();
    }

    // Builds the generic host: runs as a Windows service when started by the
    // SCM, reads the Serilog pipeline from configuration, and registers the
    // hosted worker plus its strongly typed ServiceInfo options.
    public static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .UseWindowsService()
            .UseSerilog((hostingContext, loggerConfig) =>
                loggerConfig.ReadFrom.Configuration(hostingContext.Configuration)) // custom log event
            .ConfigureServices((hostContext, services) =>
            {
                IConfiguration configuration = hostContext.Configuration; // grab the configuration
                ServiceInfo siOption = configuration.GetSection("ServiceInfo").Get<ServiceInfo>();
                services.AddSingleton(siOption);
                services.AddHostedService<Worker>();
            });
}
appsettings.json
"Serilog": {
"Using": [ "Serilog.Sinks.Console", "Serilog.Sinks.File" ],
"MinimumLevel": {
"Default": "Debug",
"Override": {
"Default": "Information",
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Information"
}
},
"WriteTo": [
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByIncludingOnly",
"Args": {
"expression": "(@Level = 'Error' or @Level = 'Fatal' or @Level = 'Warning')"
}
}
],
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "Logs/ex_.log",
"outputTemplate": "{Timestamp} [{Level:u3}] {Message}{NewLine}{Exception}",
//"outputTemplate": "{Timestamp:o} [{Level:u3}] ({SourceContext}) {Message}{NewLine}{Exception}",
"rollingInterval": "Day",
"retainedFileCountLimit": 7
}
}
]
}
}
}
],
"Enrich": [
"FromLogContext",
"WithMachineName"
],
"Properties": {
"Application": "ORAMS-II Service Status Telegram"
}
}
}
I don't know what the problem could be; when installed on a Linux machine it writes the file correctly.
Related
I used to ship my data to Elasticsearch by FileBeat-LogStash pipeline. Processed my logs which was created via log4net, mutated them, and sent required fields towards elastic.
Now I would like to replace my logic by removing FileBeat and Logstash and make use of Serilog and its Elasticsearch sink.
To broader the picture I have an API endpoint which receives requests which I need to log to a textual file as they are so I need a File sink. Further down the code, my business logic will make use of data received and among else create an object which I then need to ingest to an index at elastic.
What's the best approach for this, have one Serilog instance and use some kind of filtering or have two Serilog instances? I'm closer to decorating (enrich) my cases and then using sinks by filtering (one Serilog instance) but because I'm a novice with Serilog I don't know how to set up the whole thing.
The abbreviated code would be something like this,
My controller class:
// Receives test requests, logs the raw payload (tagged with a per-request
// LogId), and hands a transaction object to the business-logic service.
public class RequestController : ControllerBase
{
    private readonly BLService _service = new BLService(Log.Logger);

    [Route("Test")]
    [HttpPost]
    public IActionResult Test([FromBody]SampleRequest request)
    {
        // Correlation id attached only to the raw-request event logged below.
        var logId = Guid.NewGuid().ToString();
        using (LogContext.PushProperty("LogId", logId))
        {
            // '@' (not '#') is Serilog's destructuring operator: it captures
            // the request object's structure instead of calling ToString().
            Log.Information("{@request}", request);
        }

        var tran = new SampleTran
        {
            SampleTranType = "Test",
            SampleTranId = request.Id,
            EventTime = DateTime.Now
        };
        _service.ProcessTransaction(tran);
        return new OkResult();
    }
}
And my service where I'm adding property "Type" with constant value "ElkData" which I could then filter on:
// Business-logic service; every event it logs carries Type = "ElkData" so the
// sink configuration can route it to Elasticsearch instead of the text file.
public class BLService
{
    private readonly ILogger _log;

    public BLService(ILogger logger)
    {
        // ForContext stamps the "Type" property onto all events from this logger.
        _log = logger.ForContext("Type", "ElkData");
    }

    public void ProcessTransaction(SampleTran transaction)
    {
        var elkData = DoSomeStuffAndReturnElkTransactionToStore(transaction);
        // '@' destructures the object into structured event data; the pasted
        // '#' is not a Serilog operator and would not capture the object.
        _log.Information("{@ElkData}", elkData);
    }
}
One note, my text file should only contain raw requests (without elasticsearch data). So far I'm writing all to file, and my appsettings.json looks like this:
{
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "C:\\DEV\\Logs\\mylog-.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:yyyy-MM-ddTHH:mm:ss.fff zzz} [{Level:u3}] {Message:j}{NewLine}{Exception}"
}
}
],
"Enrich": [ "FromLogContext" ]
},
"AllowedHosts": "*"
}
I need to add the elastic part using filtering, am I right? Any help would be appreciated.
Here's how I managed to do what I need:
I've used ForContext to enrich my log items. So in the controller, I used:
var requestLog = Log.ForContext("Type", "Request");
requestLog.Information("Request: {@request}", request);//this needs to go to the log file
the code in BLservice stays the same and the filtering is described in the appsettings.json as:
{
"Serilog": {
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"Microsoft.Hosting.Lifetime": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByExcluding",
"Args": {
"expression": "Type = 'ElkData'"
}
}
],
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "C:\\DEV\\Logs\\mylog-.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:yyyy-MM-ddTHH:mm:ss.fff zzz} [{Level:u3}] {Message:j}{NewLine}{Exception}",
"shared": true
}
}
]
}
}
},
{
"Name": "Logger",
"Args": {
"configureLogger": {
"Filter": [
{
"Name": "ByIncludingOnly",
"Args": {
"expression": "Type = 'ElkData'"
}
}
],
"WriteTo": [
{
"Name": "Elasticsearch",
"Args": {
"nodeUris": "<your elastic url>",
"TypeName": "_doc",
"IndexFormat": "serilog_data",
"InlineFields": true,
"BufferBaseFilename": "C:\\DEV\\Logs\\elk_buffer"
}
}
]
}
}
}
]
}
}
So the file will contain everything that is logged out except logs that carry "Type = 'ElkData'" enrichment, those will end up in elasticsearch index.
Hope this simple approach will help some serilog novice out there someday
My C# dotnet core web application uses Serilog with Elasticsearch, configured like the code below. Whenever the Elasticsearch server has a problem my server's HTTP queries slow down to the point of being unusable. Let's say if Elasticsearch is down, each query takes 15 seconds or so. I assume each query tries to log the message and waits for the Elasticsearch server to respond. I understand that I can play with the Elasticsearch timeout but that is not a solution. Is there a better way not to limit the application performance by the logging method?
"Serilog": {
"IncludeScopes": true,
"MinimumLevel": {
"Default": "Information",
"Override": {
"Microsoft": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{
"Name": "File",
"Args": {
"path": "....\\log.txt",
"rollingInterval": "Day",
"outputTemplate": "{Timestamp:o} [{Level:u3}] {Message}{NewLine}{Exception}"
}
},
{
"Name": "Elasticsearch",
"Args": {
"nodeUris": "http://localhost:9200",
"typeName": "SomeApp",
"batchPostingLimit": 50,
"restrictedToMinimumLevel": "Information",
"bufferFileSizeLimitBytes": 5242880,
"bufferLogShippingInterval": 5000,
"bufferRetainedInvalidPayloadsLimitBytes": 5000,
"bufferFileCountLimit": 31,
"connectionTimeout": 5,
"queueSizeLimit": "100000",
"autoRegisterTemplate": true,
"overwriteTemplate": false
}
}
],
"Enrich": [
"FromLogContext"
],
"Properties": {
"Application": "SomeApp",
"Environment": "SomeApp.Production"
}
}
The middleware part looks like this
// Middleware entry point: times the downstream pipeline and always logs the
// request afterwards, even when an exception bubbles up.
public async Task Invoke(HttpContext context, ILogger logger, ILogContext logContext)
{
    // Allow the request body to be re-read later by the logging code.
    context.Request.EnableBuffering();

    var timer = Stopwatch.StartNew();
    try
    {
        await _next.Invoke(context);
    }
    finally
    {
        timer.Stop();
        await LogRequest(context, logger, timer.ElapsedMilliseconds, logContext);
    }
}
// Emits one structured log event per completed request using a shared template.
// NOTE(review): `message` is not declared anywhere in this snippet — presumably
// it is built from `context`/`elapsedMs` in the full source; confirm against
// the original middleware. The method is marked async but contains no await.
private async Task LogRequest(HttpContext context, ILogger logger, long elapsedMs, ILogContext logContext){
logger.Information(LogConstants.RequestMessageTemplate, message);
}
I'm running a business network on IBM Cloud Blockchain Platform 2.0. I downloaded a connection profile from the instantiated smart contracts section on the Platform Console, and am trying to connect using the Fabric SDK for Go but I am facing an error.
This is my current relevant code, under main
// Load the connection profile exported from the IBM Blockchain Platform
// console and build a Fabric SDK instance from it.
configOpts := fabricConfig.FromFile("./profiles/flex.json")
fabSDK, err := fabsdk.New(configOpts)
if err != nil {
// Fatal: without a working SDK there is nothing else this program can do.
entry.WithError(err).Fatal("Error setting up Fabric SDK")
}
// Release SDK resources (connections, caches) when main returns.
defer fabSDK.Close()
Expected: Connects to the business network instance
Actual: I get an error
FATA[0000] Error setting up Fabric SDK env=DEV
error="failed to initialize configuration: unable to initialize cryptosuite using crypto suite config:
failed to initialize crypto suite: Unsupported BCCSP Provider: "
I add "cryptoconfig" path to the client.
BCCSP is optional, and you can remove.
I added the sample config, please see below.
More detail config at Github go-SDK config.yaml
{
"name": "first-network-org1",
"version": "1.0.0",
"client": {
"organization": "Org1",
"logging": {
"level": "info"
},
"cryptoconfig": {
"path": "crypto-config"
},
"credentialStore": {
"path": "/tmp/keystore",
"cryptoStore": {
"path": "/tmp/msp"
}
},
"connection": {
"timeout": {
"peer": {
"endorser": "300"
}
}
}
},
"channels": {
"mychannel": {
"peers": {
"peer0.org1.example.com": {}
}
}
},
"orderers": {
"orderer.example.com": {
"url": "orderer.example.com:7050",
"grpcOptions": {
"ssl-target-name-override": "orderer.example.com",
"keep-alive-time": "60s",
"keep-alive-timeout": "60s",
"keep-alive-permit": false,
"fail-fast": false,
"allow-insecure": false
},
"tlsCACerts": {
"path": "crypto-config/ordererOrganizations/example.com/tlsca/tlsca.example.com-cert.pem"
}
}
},
"organizations": {
"Org1": {
"mspid": "Org1MSP",
"cryptoPath": "crypto-config/peerOrganizations/org1.example.com/users/User1@org1.example.com/msp",
"peers": [
"peer0.org1.example.com",
"peer1.org1.example.com"
],
"certificateAuthorities": [
"ca.org1.example.com"
]
}
},
"peers": {
"peer0.org1.example.com": {
"url": "grpcs://localhost:7051",
"tlsCACerts": {
"path": "crypto-config/peerOrganizations/org1.example.com/tlsca/tlsca.org1.example.com-cert.pem"
},
"grpcOptions": {
"ssl-target-name-override": "peer0.org1.example.com"
}
},
"peer1.org1.example.com": {
"url": "grpcs://localhost:8051",
"tlsCACerts": {
"path": "crypto-config/peerOrganizations/org1.example.com/tlsca/tlsca.org1.example.com-cert.pem"
},
"grpcOptions": {
"ssl-target-name-override": "peer1.org1.example.com"
}
}
},
"certificateAuthorities": {
"ca.org1.example.com": {
"url": "https://localhost:7054",
"caName": "ca-org1",
"tlsCACerts": {
"path": "crypto-config/peerOrganizations/org1.example.com/tlsca/tlsca.org1.example.com-cert.pem"
},
"httpOptions": {
"verify": false
}
}
}
}
Pretty sure I am missing something clearly obvious but not seeing it.
How can I use my updated swagger.json file?
I took my boilerplate swagger/v1/swagger.json code and pasted it into the editor.swagger.io system. I then updated the descriptions etc, added examples to my models and then saved the contents as swagger.json.
Moved the file into the root of my api application, set the file to copy always.
// Registers the Swagger generator with a single document named "V1".
// NOTE(review): the document name is case-sensitive — the UI endpoint must
// reference the same casing ("/swagger/V1/swagger.json") or the generated
// document will not be found; confirm against the UseSwaggerUI call.
public void ConfigureServices(IServiceCollection services)
{...
services.AddSwaggerGen(c => { c.SwaggerDoc("V1", new Info {Title = "Decrypto", Version = "0.0"}); });
}
// Wires the Swagger middleware (document generation + UI) into the pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
...
app.UseSwagger();
//--the default works fine
// app.UseSwaggerUI(c => { c.SwaggerEndpoint("/swagger/V1/swagger.json", "Decrypto v1"); });
// NOTE(review): a relative endpoint ("swagger.json") is resolved against the
// swagger UI's own route, i.e. /swagger/swagger.json — presumably not where
// the custom file is actually served; verify the file is reachable there.
app.UseSwaggerUI(c => { c.SwaggerEndpoint("swagger.json", "Decrypto v1"); });
app.UseMvc();
}
I have tried a few different variation but none seem to be the trick. I don't really want to rewrite the work in SwaggerDoc as it seems dirty to me put documentation in the runtime.
the custom swagger.json file I want to use looks like this:
{
"swagger": "2.0",
"info": {
"version": "0.0",
"title": "My Title"
},
"paths": {
"/api/Decryption": {
"post": {
"tags": [
"API for taking encrypted values and getting the decrypted values back"
],
"summary": "",
"description": "",
"operationId": "Post",
"consumes": [
"application/json-patch+json",
"application/json",
"text/json",
"application/*+json"
],
"produces": [
"text/plain",
"application/json",
"text/json"
],
"parameters": [
{
"name": "units",
"in": "body",
"required": true,
"schema": {
"uniqueItems": false,
"type": "array",
"items": {
"$ref": "#/definitions/EncryptedUnit"
}
}
}
],
"responses": {
"200": {
"description": "Success",
"schema": {
"uniqueItems": false,
"type": "array",
"items": {
"$ref": "#/definitions/DecryptedUnit"
}
}
}
}
}
}
},
"definitions": {
"EncryptedUnit": {
"type": "object",
"properties": {
"value": {
"type": "string",
"example": "7OjLFw=="
},
"initializeVector": {
"type": "string",
"example": "5YVg="
},
"cipherText": {
"type": "string",
"example": "596F5AA48A882"
}
}
},
"DecryptedUnit": {
"type": "object",
"properties": {
"encrypted": {
"type": "string",
"example": "7OjLV="
},
"decrypted": {
"type": "string",
"example": "555-55-5555"
}
}
}
}
}
you need to configure PhysicalFileProvider and put your swagger.json into wwwroot or anywhere accessible by PhysicalFileProvider. After that you can access it using IFileProvider
Reference: https://www.c-sharpcorner.com/article/file-providers-in-asp-net-core/
Edit: If you just add app.UseStaticFiles(); into your Startup, you can access wwwroot without hassle.
Reference
Completely Different Approach
you may also consider to serve your file using Controller/Action
// Serves the hand-edited swagger.json from the MyStaticFiles folder as a
// plain JSON download.
public IActionResult GetSwaggerDoc()
{
    var docPath = Path.Combine(
        Directory.GetCurrentDirectory(), "MyStaticFiles", "swagger.json");
    return PhysicalFile(docPath, "application/json");
}
.NET Core 2.2 can serve a physical file at a URL resource as shown below.
But if you use a custom swagger.json, your API documentation stays frozen unless you update the file every time the API changes.
public void Configure(IApplicationBuilder app, IHostingEnvironment env,
ILoggerFactory loggerFactory)
{
...
// PhysicalFileProvider requires a *directory* root (a file path throws
// at startup), and RequestPath must begin with '/'. Point the provider at
// the folder and let the static-files middleware serve swagger.json from
// /swagger/v1/swagger.json.
app.UseStaticFiles(new StaticFileOptions
{
    FileProvider = new PhysicalFileProvider(
        Path.Combine(Directory.GetCurrentDirectory(), "swagger", "v1")),
    RequestPath = "/swagger/v1"
});
}
I need to configure appsettings to use Serilog and to separate log sources (IdentityServer4, SQL queries, application) and redirect the output to different files to analyze the program flow.
I've installed serilog, serilog.settings.configuration, serilog.sinks.rollingfile, serilog.filters.extensions and serilog.sinks.console, but I have not found any documentation on how to do this.
This is my Serilog section in appsetting:
"Serilog": {
"MinimumLevel": {
"Default": "Debug",
"Override": {
"Microsoft": "Warning",
"System": "Warning"
}
},
"WriteTo": [
{ "Name": "LiterateConsole" },
{
"Name": "RollingFile",
"Args": { "pathFormat": "Logs/log-{Date}.txt" }
},
{
"Name": "RollingFile",
"pathFormat": "Logs/DBCommands-{Date}.log",
"Filter": [
{
"Name": "ByIncludingOnly",
"Args": {
"expression": "SourceContext = 'IdentityServer4'"
}
}
]
}
],
"Enrich": [ "FromLogContext", "WithMachineName", "WithThreadId" ],
"Properties": {
"Application": "dsm.security"
}
}
Where am I wrong?
UPDATE
I would like to have the same result that i can obtain with the following code
// Equivalent fluent configuration: a console sink plus one filtered
// sub-logger per target file. Note: '@' is the C# verbatim-string prefix —
// the pasted snippet had it mangled to '#', which does not compile.
Log.Logger = new LoggerConfiguration()
    .MinimumLevel.Debug()
    .ReadFrom.Configuration(Configuration)
    .MinimumLevel.Override("Microsoft", LogEventLevel.Information)
    .MinimumLevel.Override("System", LogEventLevel.Warning)
    .MinimumLevel.Override("Default", LogEventLevel.Warning)
    .Enrich.FromLogContext()
    .WriteTo.Console(theme: AnsiConsoleTheme.Code)
    // Route by level: errors and fatals each get their own rolling file.
    .WriteTo.Logger(l => l.Filter.ByIncludingOnly(e => e.Level == LogEventLevel.Error).WriteTo.RollingFile(@"Logs/Error-{Date}.log"))
    .WriteTo.Logger(l => l.Filter.ByIncludingOnly(e => e.Level == LogEventLevel.Fatal).WriteTo.RollingFile(@"Logs/Fatal-{Date}.log"))
    // Route by SourceContext prefix: one file per subsystem.
    .WriteTo.Logger(l => l.Filter.ByIncludingOnly(Matching.FromSource("dsm.security")).WriteTo.RollingFile(@"Logs/dsm.security-{Date}.log"))
    .WriteTo.Logger(l => l.Filter.ByIncludingOnly(Matching.FromSource("IdentityServer4")).WriteTo.RollingFile(@"Logs/IdentityServer-{Date}.log"))
    .WriteTo.Logger(l => l.Filter.ByIncludingOnly(Matching.FromSource("Microsoft.EntityFrameworkCore")).WriteTo.RollingFile(@"Logs/EF-{Date}.log"))
    // .WriteTo.RollingFile(@"Logs/Verbose-{Date}.log")
    .CreateLogger();