Problem with schema validation using Postman

Body of my request:
[
    {
        "postId": 1,
        "id": 1,
        "name": "name abc",
        "email": "Eliseo#gardner.biz",
        "body": "something"
    },
    ...
]
I am trying to validate it like below:
var schema = {
    "type": "array",
    "properties": {
        "postId": { "type": "integer" },
        "id": { "type": "integer" },
        "name": { "type": "string" },
        "email": {
            "type": "string",
            "pattern": "^[A-Z0-9._%+-]+#[A-Z0-9.-]+\.[A-Z]{2,}$"
        },
        "body": { "type": "string" }
    },
    "required": ["postId", "id", "name", "email", "body"]
};
pm.test('Schema is valid', function() {
    pm.expect(tv4.validate(jsonData, schema)).to.be.true;
});
The test passes even if I change, for example, the id type to string, or the email pattern to an invalid one.
What is wrong with this code?

I would recommend moving away from tv4 for schema validation and using the built-in jsonSchema assertion, which uses AJV under the hood.
Apart from that, your schema wasn't right: it is missing an items definition, so the property checks are applied to the array itself rather than to each object inside it. For an array instance, properties is simply ignored, which is why the test passes no matter what you change.
This might help you out:
let schema = {
    "type": "array",
    "items": {
        "type": "object",
        "required": ["postId", "id", "name", "email", "body"],
        "properties": {
            "postId": { "type": "integer" },
            "id": { "type": "integer" },
            "name": { "type": "string" },
            "email": {
                "type": "string",
                "pattern": "^[A-Za-z0-9._%+-]+#[A-Za-z0-9.-]+\\.[A-Za-z]{2,}$"
            },
            "body": { "type": "string" }
        }
    }
}

pm.test("Schema is valid", () => {
    pm.response.to.have.jsonSchema(schema)
})
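As a sanity check, it is worth confirming that the corrected schema can actually fail: corrupt one field on purpose and expect validation to return false. A minimal sketch (tv4 is used here only because it is already available in the Postman sandbox, and the broken id value is deliberate):

pm.test('Schema rejects a wrong id type', () => {
    let bad = pm.response.json();
    bad[0].id = "not-a-number";  // deliberately break one field
    pm.expect(tv4.validate(bad, schema)).to.be.false;
});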

Related

How to validate an array of different objects using dry-schema gem

Given I have a JSON object containing an array of various objects, like:
{
    "array": [
        {
            "type": "type_1",
            "value": 5
        },
        {
            "type": "type_2",
            "kind": "person"
        }
    ]
}
Using plain JSON Schema, I can validate this structure with the following schema definition:
{
    "type": "object",
    "properties": {
        "array": {
            "type": "array",
            "items": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "type": { "type": "string", "enum": ["type_1"] },
                            "value": { "type": "integer", "enum": [5] }
                        }
                    },
                    {
                        "type": "object",
                        "properties": {
                            "type": { "type": "string", "enum": ["type_2"] },
                            "kind": { "type": "string", "enum": ["person"] }
                        }
                    }
                ]
            }
        }
    }
}
How can I validate the input JSON using the dry-schema gem? Do you have any ideas?

How can I transform a field of a JSON file with NiFi?

Good morning,
I am new to NiFi (I am using v1.12.0) and I want to modify a field in a JSON file and save the result to another path.
This is an example of my JSON file:
"Id": "2b2ef24a-f3ce-4249-ad92-db9a565b5b66",
"Tipo": "AuditEvent",
"SubTipo": "Plataforma",
"Accion": "Audit.Middleware.EventData.HttpResponseSentEvent",
"IDCorrelacion": "7af48a20-587d-4e60-9c3b-02cc6a074662",
"TiempoEvento": "2020-07-30 11:45:08.315",
"Resultado": "No informado",
"ResultadoDesc": "No informado",
"Origen": {
"IDOrigen": "132403308182038429",
"Tipo": "Backend",
"Aplicacion": "fabric:/Omnicanalidad.Canales.Muro_v1",
"Servicio": "fabric:/Omnicanalidad.Canales.Muro_v1/Muro",
"Maquina": "ibsfabbe02pru",
"IP": "ibsfabbe02pru"
},
"OrigenInterno": "Audit.Middleware.AuditMiddleware",
"Agente": {
"Rol": "Sin rol asignado",
"IDUsuario": "1428",
"AltIDUsuario": "20141115",
"Localizador": "197.183.27.17",
"PropositoUso": "No informado",
"IDSession": "",
"XForwardedPort": "443",
"XForwardedFor": "162.37.0.100:30279, 162.37.0.5:10158, 172.37.0.5",
"XForwardedHost": "ebeprate.es",
"XForwardedProto": "https",
"XOriginalURL": "/test/v1/Relation/ObtieneGestor?IdUser=4355625&NiciTitular=43485326",
"XOriginalHost": "ebeprate.es",
"Referer": null,
"AuthenticationType": "AuthenticationTypes.Federation",
"UserAgent": "HttpApplicationGateway",
"Claims": "Hello World",
"AcceptedLanguage": null
},
"DatosEvento": {
"Headers": ["Content-Length: 0", "Request-Context: appId=cid-v1:d8b40be1-4838-4a94-a4f8-3ec374989b27"],
"StatusCode": 204,
"Body": ""
}
}
I want to modify the field TiempoEvento from a date to a timestamp; in this case, 2020-07-30 11:45:08.315 should become 1596109508.
So I used this procedure:
1. I used the GetFile processor to pick up the file. I configured its properties without any problems and everything is OK.
2. I used the UpdateRecord processor to modify the field (this is where the problems appear). In Properties I have three properties.
I read that I need to configure a schema registry if I want to work with any data in NiFi (I don't know if that is totally true). Since I am working with a JSON file, I assumed I needed one, so I set it up.
In Controller Services I configured a JsonPathReader, a JsonRecordSetWriter, and an AvroSchemaRegistry.
I started with the AvroSchemaRegistry.
SETTINGS
Name: Test
PROPERTIES
Validate Field Names -> true
test-schema ->
{
    "name": "MyFirstNiFiTest",
    "type": "record",
    "namespace": "test.example",
    "fields": [
        { "name": "Id", "type": "string" },
        { "name": "Tipo", "type": "string" },
        { "name": "SubTipo", "type": "string" },
        { "name": "Accion", "type": "string" },
        { "name": "IDCorrelacion", "type": "string" },
        { "name": "TiempoEvento", "type": "string" },
        { "name": "Resultado", "type": "string" },
        { "name": "ResultadoDesc", "type": "string" },
        {
            "name": "Origen",
            "type": {
                "name": "Origen",
                "type": "record",
                "fields": [
                    { "name": "IDOrigen", "type": "string" },
                    { "name": "Tipo", "type": "string" },
                    { "name": "Aplicacion", "type": "string" },
                    { "name": "Servicio", "type": "string" },
                    { "name": "Maquina", "type": "string" },
                    { "name": "IP", "type": "string" }
                ]
            }
        },
        { "name": "OrigenInterno", "type": "string" },
        {
            "name": "Agente",
            "type": {
                "name": "Agente",
                "type": "record",
                "fields": [
                    { "name": "Rol", "type": "string" },
                    { "name": "IDUsuario", "type": "string" },
                    { "name": "AltIDUsuario", "type": "string" },
                    { "name": "Localizador", "type": "string" },
                    { "name": "PropositoUso", "type": "string" },
                    { "name": "IDSession", "type": "string" },
                    { "name": "XForwardedPort", "type": "string" },
                    { "name": "XForwardedFor", "type": "string" },
                    { "name": "XForwardedHost", "type": "string" },
                    { "name": "XForwardedProto", "type": "string" },
                    { "name": "XOriginalURL", "type": "string" },
                    { "name": "XOriginalHost", "type": "string" },
                    { "name": "Referer", "type": ["string", "null"] },
                    { "name": "AuthenticationType", "type": ["string", "null"] },
                    { "name": "UserAgent", "type": "string" },
                    { "name": "Claims", "type": "string" },
                    { "name": "AcceptedLanguage", "type": ["string", "null"] }
                ]
            }
        },
        {
            "name": "DatosEvento",
            "type": {
                "name": "DatosEvento",
                "type": "record",
                "fields": [
                    { "name": "Name", "type": "string" },
                    { "name": "Category", "type": "string" },
                    { "name": "EventType", "type": "int" },
                    { "name": "Id", "type": "int" },
                    { "name": "ApiName", "type": "string" },
                    { "name": "Token", "type": "string" },
                    { "name": "ApiScopes", "type": { "type": "array", "items": "string" } },
                    { "name": "TokenScopes", "type": { "type": "array", "items": "string" } },
                    { "name": "Message", "type": "string" },
                    { "name": "ActivityId", "type": "string" },
                    { "name": "TimeStamp", "type": "int", "logicalType": "date" },
                    { "name": "ProcessId", "type": "int" },
                    { "name": "LocalIpAddress", "type": "string" },
                    { "name": "RemoteIpAddress", "type": "string" }
                ]
            }
        }
    ]
}
I converted the JSON structure to this Avro schema, enabled the controller service, and everything was OK.
Then I configured the JsonRecordSetWriter:
SETTINGS
Name: TestRecordSetWriter
PROPERTIES
I enabled it and everything was OK.
Then I configured the JsonPathReader:
SETTINGS
Name: TestPathReader
PROPERTIES
At this point I get an alert that says:
'JSON Paths' is invalid because No JSON Paths were specified
so I can't enable this controller service. What am I missing?
I don't know if there is an easier way to do this, or if I am totally wrong, so I need some help.
Thank you
I found the answer. I had a bad configuration in the JsonPathReader: I had not configured the schema's fields as JSON Path properties on the reader.
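For anyone hitting the same alert: JsonPathReader has no fixed properties for fields. You add one user-defined (dynamic) property per record field, where the property name is the field name and the value is the JsonPath to read, for example (a partial sketch, only a few fields shown):

Id -> $.Id
TiempoEvento -> $.TiempoEvento
Origen -> $.Origen

For the actual date-to-timestamp conversion, one option (an untested sketch, assuming the date format shown above) is to configure UpdateRecord with Replacement Value Strategy = Literal Value and a user-defined property /TiempoEvento set to:

${field.value:toDate('yyyy-MM-dd HH:mm:ss.SSS'):toNumber():divide(1000)}

toNumber() yields epoch milliseconds, so divide(1000) produces the 1596109508-style value in seconds (the exact number depends on the server time zone). Note that the schema would then need TiempoEvento to be a numeric type, or the value will be written back as a string.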

How to use "where" clausule with graphQL in OpenAPI-to-GraphQL server?

I'm using LoopBack 4 with oasgraph (renamed to OpenAPI-to-GraphQL).
One of my OpenAPI endpoints has a filter parameter with the following schema:
"parameters": [
{
"name": "filter",
"in": "query",
"style": "deepObject",
"explode": true,
"schema": {
"properties": {
"where": {
"type": "object"
},
"fields": {
"type": "object",
"properties": {
"id": {
"type": "boolean"
},
"idOwner": {
"type": "boolean"
},
"createdTimestamp": {
"type": "boolean"
},
"modifiedTimestamp": {
"type": "boolean"
},
"idUserCreated": {
"type": "boolean"
},
"idUserModified": {
"type": "boolean"
},
"value": {
"type": "boolean"
},
"dicContactId": {
"type": "boolean"
},
"counterpartyId": {
"type": "boolean"
}
}
},
"offset": {
"type": "integer",
"minimum": 0
},
"limit": {
"type": "integer",
"minimum": 0
},
"skip": {
"type": "integer",
"minimum": 0
},
"order": {
"type": "array",
"items": {
"type": "string"
}
},
"include": {
"type": "array",
"items": {
"type": "object",
"properties": {
"relation": {
"type": "string"
},
"scope": {
"properties": {
"where": {
"type": "object"
},
"fields": {
"type": "object",
"properties": {}
},
"offset": {
"type": "integer",
"minimum": 0
},
"limit": {
"type": "integer",
"minimum": 0
},
"skip": {
"type": "integer",
"minimum": 0
},
"order": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
}
}
}
},
"type": "object"
}
}
],
As you can see, the where property is of type "object". However, the GraphQL editor expects a String:
(screenshot: GraphQL editor - expected type String)
The problem is that the string produces an error when I run a query:
(screenshot: GraphQL editor - where clause is not an object)
As a result, I'm not able to perform a query with a where clause.
You can use the qs node module to stringify your where clause object, because LoopBack uses qs under the hood to parse the query string.
import * as qs from 'qs';

let query = {
    // where condition
};

// Serialize the filter object into a query string that LoopBack can parse back.
const queryString = qs.stringify(query, { addQueryPrefix: true });
You can find more info about qs here: https://www.npmjs.com/package/qs
LoopBack 4 query string issue discussion: https://github.com/strongloop/loopback-next/issues/2468
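For example (a sketch; the status field is made up for illustration), a where clause serializes like this:

const filter = { where: { status: 'active' } };
qs.stringify({ filter }, { addQueryPrefix: true });
// => '?filter%5Bwhere%5D%5Bstatus%5D=active', i.e. ?filter[where][status]=active
// LoopBack's qs-based parser reconstructs the original nested object from this.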

In MS Flow, how do I loop through an array and extract values from the array?

I have an HTTP REST result in this format:
{
    "type": "object",
    "properties": {
        "page": {
            "type": "object",
            "properties": {
                "total": { "type": "integer" }
            }
        },
        "list": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "id": { "type": "string" },
                    "type": { "type": "string" },
                    "status": { "type": "string" }
                },
                "required": ["id", "type", "status"]
            }
        }
    }
}
I am trying to loop through each item in "list" and extract the id, type, and status. How do I do this in MS Flow? Here is what I got:
(screenshot of the flow)
As you can see, the variables are not in the dynamic content picker. How do I get them to show up?
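Not from the original thread, but a sketch of the usual pattern: feed the response body into a "Parse JSON" action with the schema above, then add an "Apply to each" on the list output, i.e. body('Parse_JSON')?['list'] (assuming default action names). Inside the loop, each field can be read with expressions such as items('Apply_to_each')?['id'] and items('Apply_to_each')?['status']. The dynamic content picker only offers these fields once Parse JSON has a schema, so if they are missing, regenerating the schema via "Generate from sample" with a real response body usually fixes it.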

Elasticsearch mapping parser exception?

I'm trying to create a nested document in Elasticsearch.
Structure: title, name, comments.
comments is a nested document; inside it are comment and star_rating, and inside comment are name and address.
Here is the query:
PUT /sounduu
{
    "mappings": {
        "blogpost": {
            "properties": {
                "title": { "type": "string" },
                "name": { "type": "string" },
                "comments": {
                    "properties": {
                        "comment": {
                            "properties": {
                                "name": { "type": "string" },
                                "address": { "type": "string" }
                            }
                        },
                        "star_rating": { "type": "long" }
                    }
                }
            }
        }
    }
}
PUT /sounduu/blogpost/1
{
    "title": "someh_title",
    "name": "soundy",
    "comments": {
        "comment": "kuu",
        [{
            "name": "juwww",
            "address": "eeeey"
        },
        {
            "name": "jj",
            "address": "oo"
        }]
    },
    "star_rating": 6
}
Error:
{
    "error": {
        "root_cause": [
            {
                "type": "mapper_parsing_exception",
                "reason": "object mapping for [comments.comment] tried to parse field [comment] as object, but found a concrete value"
            }
        ],
        "type": "mapper_parsing_exception",
        "reason": "object mapping for [comments.comment] tried to parse field [comment] as object, but found a concrete value"
    },
    "status": 400
}
Can anyone help with this?
In your PUT /sounduu/blogpost/1 request, you are attempting to treat the "comment" property as both a nested object and a string.
If you format your request's JSON, you can observe the issue:
{
    "title": "someh_title",
    "name": "soundy",
    "comments": {
        "comment": "kuu",
        [{
            "name": "juwww",
            "address": "eeeey"
        },
        {
            "name": "jj",
            "address": "oo"
        }]
    },
    "star_rating": 6
}
You either need to update your mapping to include a "text" property and move the "comment": "kuu" content accordingly, or omit it from your request to work with your current mapping.
To me it seems logical to group everything like so:
PUT /sounduu
{
    "mappings": {
        "blogpost": {
            "properties": {
                "title": { "type": "string" },
                "name": { "type": "string" },
                "comments": {
                    "properties": {
                        "text": { "type": "string" },
                        "name": { "type": "string" },
                        "address": { "type": "string" }
                    }
                },
                "star_rating": { "type": "long" }
            }
        }
    }
}
The indexing request would then look like:
{
    "title": "someh_title",
    "name": "soundy",
    "comments": [
        {
            "text": "kuu",
            "name": "juwww",
            "address": "eeeey"
        },
        {
            "text": "kuu2",
            "name": "jj",
            "address": "oo"
        }
    ],
    "star_rating": 6
}
If you are using a more recent Elasticsearch version, it is recommended to replace the 'string' data type with 'text'; the Elasticsearch community has deprecated 'string'.
The reformed request should be:
PUT /sounduu
{
    "mappings": {
        "blogpost": {
            "properties": {
                "title": { "type": "text" },
                "name": { "type": "text" },
                "comments": {
                    "properties": {
                        "text": { "type": "text" },
                        "name": { "type": "text" },
                        "address": { "type": "text" }
                    }
                },
                "star_rating": { "type": "long" }
            }
        }
    }
}
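One more note, since the original goal was a "nested document": with the mappings above, Elasticsearch stores comments as a plain object field, and arrays of objects get flattened, so a query can incorrectly match a name from one comment with an address from another. If comments need to be queried as independent sub-documents, the field should be declared with the nested type instead (a sketch of just that part of the mapping):

"comments": {
    "type": "nested",
    "properties": {
        "text": { "type": "text" },
        "name": { "type": "text" },
        "address": { "type": "text" }
    }
}

Nested fields are then searched with a nested query that specifies the path.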
