I use the REST API to create a simple product in my Magento store. Everything works correctly except the quantity: it is updated correctly in the "Quantity" column, but not in the Salable Quantity.
REST call: "www.mysite.com/V1/products"
Array data:
$data = [
"product" => [
"sku" => $sku,
"name" => $product_title,
"attribute_set_id" => 4,
"price" => $price,
"status" => 1,
"visibility" => 4,
"type_id" => "simple",
"weight" => "1",
"extension_attributes" => [
"category_links" => [
[
"position" => 0,
"category_id" => "53"
]
],
"stock_item" => [
"qty" => $qty,
"is_in_stock" => true
]
],
"custom_attributes" => [
[
"attribute_code" => "special_price",
"value" => $salable_price
],
[
"attribute_code" => "special_from_date",
"value" => "2021-02-07 00:00:00"
],
[
"attribute_code" => "special_to_date",
"value" => "2091-02-07 00:00:00"
],
[
"attribute_code" => "cost",
"value" => $salable_price
],
[
"attribute_code" => "description",
"value" => $description
],
[
"attribute_code" => "short_description",
"value" => $short_description
]
]
]
];
As you can see, qty is updated correctly in the qty column but not in the Salable Quantity. What is my mistake?
Please try this, adding manage_stock and use_config_manage_stock to the stock_item you already send under extension_attributes:
"extension_attributes": {
    "stock_item": {
        "is_in_stock": true,
        "qty": 10,
        "manage_stock": true,
        "use_config_manage_stock": 1
    }
}
I have the following array of hashes as input:
ar = [{"Sales"=>"11", "CustID"=>"Cust04"},
{"Sales"=>"44.9", "CustID"=>"Cust04"},
{"Sales"=>"79.17", "CustID"=>"Cust06"},
{"Sales"=>"73.84", "CustID"=>"Cust06"},
{"Sales"=>"34.9", "CustID"=>"Cust06"},
{"Sales"=>"29.6825", "CustID"=>"Cust06"},
{"Sales"=>"2048.7", "CustID"=>"Cust06"},
{"Sales"=>"15.24", "CustID"=>"Cust02"},
{"Sales"=>"54.874", "CustID"=>"Cust04"},
{"Sales"=>"12.79", "CustID"=>"Cust08"},
{"Sales"=>"22.65", "CustID"=>"Cust08"},
{"Sales"=>"63.26", "CustID"=>"Cust08"},
{"Sales"=>"16.207", "CustID"=>"Cust08"},
{"Sales"=>"782.2", "CustID"=>"Cust07"},
{"Sales"=>"215.45", "CustID"=>"Cust07"},
{"Sales"=>"781.23", "CustID"=>"Cust07"},
{"Sales"=>"370.14", "CustID"=>"Cust07"},
{"Sales"=>"1.7", "CustID"=>"Cust09"},
{"Sales"=>"22.405", "CustID"=>"Cust09"}
]
I am looking for output like the example below, based on the total sales per customer, with a rank:
ar_out # =>
["Customer" => "Cust04", "TotalSales" => "xxxx", "Rank" => "1"]
You can try it this way.
Input
ar = [
{ "Sales" => "11", "CustID" => "Cust04" },
{ "Sales" => "44.9", "CustID" => "Cust04" },
{ "Sales" => "79.17", "CustID" => "Cust06" },
{ "Sales" => "73.84", "CustID" => "Cust06" },
{ "Sales" => "34.9", "CustID" => "Cust06" },
{ "Sales" => "29.6825", "CustID" => "Cust06" },
{ "Sales" => "2048.7", "CustID" => "Cust06" },
{ "Sales" => "15.24", "CustID" => "Cust02" },
{ "Sales" => "54.874", "CustID" => "Cust04" },
{ "Sales" => "12.79", "CustID" => "Cust08" },
{ "Sales" => "22.65", "CustID" => "Cust08" },
{ "Sales" => "63.26", "CustID" => "Cust08" },
{ "Sales" => "16.207", "CustID" => "Cust08" },
{ "Sales" => "782.2", "CustID" => "Cust07" },
{ "Sales" => "215.45", "CustID" => "Cust07" },
{ "Sales" => "781.23", "CustID" => "Cust07" },
{ "Sales" => "370.14", "CustID" => "Cust07" },
{ "Sales" => "1.7", "CustID" => "Cust09" },
{ "Sales" => "22.405", "CustID" => "Cust09" }
]
Process
ar.each_with_object(Hash.new(0)) { |hsh, e| e[hsh['CustID']] += hsh['Sales'].to_f }.
sort_by { |_, v| -v }.
map.with_index { |(k, v), i| [{ 'Customer' => k, 'TotalSales' => v, 'Rank' => i + 1 }] }
Output
[
[
{
"Customer": "Cust06",
"TotalSales": 2266.2925,
"Rank": 1
}
],
[
{
"Customer": "Cust07",
"TotalSales": 2149.02,
"Rank": 2
}
],
[
{
"Customer": "Cust08",
"TotalSales": 114.90699999999998,
"Rank": 3
}
],
[
{
"Customer": "Cust04",
"TotalSales": 110.774,
"Rank": 4
}
],
[
{
"Customer": "Cust09",
"TotalSales": 24.105,
"Rank": 5
}
],
[
{
"Customer": "Cust02",
"TotalSales": 15.24,
"Rank": 6
}
]
]
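Note that each result above is wrapped in its own single-element array because the map block returns [{ ... }]. If you prefer a flat array of hashes, as in the question's example output, here is a sketch of the same chain with the extra array literal dropped:
ar.each_with_object(Hash.new(0)) { |hsh, e| e[hsh['CustID']] += hsh['Sales'].to_f }.
  sort_by { |_, v| -v }.
  map.with_index { |(k, v), i| { 'Customer' => k, 'TotalSales' => v, 'Rank' => i + 1 } }
#=> [{"Customer"=>"Cust06", "TotalSales"=>2266.2925, "Rank"=>1}, ...]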
ar.
each_with_object({}) do |hash, acc|
(acc[hash["CustID"]] ||= {"Customer" => hash["CustID"], "TotalSales" => 0}).
tap { |h| h["TotalSales"] += hash["Sales"].to_f }
end.
values.
sort_by { |h| -h["TotalSales"] }.
map.
with_index(1) { |h, idx| h.merge("Rank" => idx) }
#⇒ [{"Customer"=>"Cust06", "TotalSales"=>2266.2925, "Rank"=>1},
# {"Customer"=>"Cust07", "TotalSales"=>2149.02, "Rank"=>2},
# {"Customer"=>"Cust08", "TotalSales"=>114.90699999999998, "Rank"=>3},
# {"Customer"=>"Cust04", "TotalSales"=>110.774, "Rank"=>4},
# {"Customer"=>"Cust09", "TotalSales"=>24.105, "Rank"=>5},
# {"Customer"=>"Cust02", "TotalSales"=>15.24, "Rank"=>6}]
I have a JSON object that results from an XML input. It looks like this:
{
"#version" => "1",
"#timestamp" => "2016-04-11T15:35:07.372Z",
"host" => "YUSUF-PC",
"command" => "nana",
"doc" => {
"TotalResults" => "1892",
"Audit" => [
[0] {
"Id" => "2260167",
"Action" => "UPDATE",
"ParentId" => "30612",
"ParentType" => "defect",
"Time" => "2016-01-04 08:27:59",
"User" => "nana",
"Properties" => {
"Property" => [
[0] {
"Label" => "Statut",
"Name" => "status",
"NewValue" => [
[0] "En cours"
]
},
[1] {
"Label" => "Affecté à",
"Name" => "owner",
"NewValue" => [
[0] "nana"
]
},
[2] {
"Label" => "Priorité",
"Name" => "severity",
"NewValue" => [
[0] "nana"
]
}
]
}
},
[1] {
"Id" => "2260168",
"Action" => "UPDATE",
"ParentId" => "30612",
"ParentType" => "defect",
"Time" => "2016-01-04 09:45:33",
"User" => "nana",
"Properties" => {
"Property" => [
[0] {
"Label" => "Affecté à",
"Name" => "owner",
"NewValue" => [
[0] "nana"
],
"OldValue" => [
[0] "nana"
]
}
]
}
}
]
} }
I need to split this JSON into properties, i.e. to get one document per property. The problem is not the split operation; rather, when I insert the result into Elasticsearch, the "NewValue" field is not taken into account, so I need to write a Ruby filter that changes the value to value[0]. Can anyone help? I'm not good at Ruby.
I want to get a JSON document like this one:
{
"#version" => "1",
"#timestamp" => "2016-04-11T15:35:07.372Z",
"host" => "YUSUF-PC",
"command" => "nana",
"doc" => {
"TotalResults" => "1892",
"Audit" => [
[0] {
"Id" => "2260167",
"Action" => "UPDATE",
"ParentId" => "30612",
"ParentType" => "defect",
"Time" => "2016-01-04 08:27:59",
"User" => "nana",
"Properties" => {
"Property" =>
{
"Label" => "Statut",
"Name" => "status",
"NewValue" => "En cours"
}
}
}
]
}
}
Thank you
I hope this helps.
old = {
"#version" => "1",
"#timestamp" => "2016-04-11T15:35:07.372Z",
"host" => "YUSUF-PC",
"command" => "nana",
"doc" => {
"TotalResults" => "1892",
"Audit" => [
{
"Id" => "2260167",
"Action" => "UPDATE",
"ParentId" => "30612",
"ParentType" => "defect",
"Time" => "2016-01-04 08:27:59",
"User" => "nana",
"Properties" => {
"Property" => [
{
"Label" => "Statut",
"Name" => "status",
"NewValue" => [
"En cours"
]
},
{
"Label" => "Affecté à",
"Name" => "owner",
"NewValue" => [
"nana"
]
},
{
"Label" => "Priorité",
"Name" => "severity",
"NewValue" => [
"nana"
]
}
]
}
},
{
"Id" => "2260168",
"Action" => "UPDATE",
"ParentId" => "30612",
"ParentType" => "defect",
"Time" => "2016-01-04 09:45:33",
"User" => "nana",
"Properties" => {
"Property" => [
{
"Label" => "Affecté à",
"Name" => "owner",
"NewValue" => [
"nana"
],
"OldValue" => [
"nana"
]
}
]
}
}
]
} }
# This is the line that does the actual work: it replaces each single-element "NewValue" array with its first element, mutating the nested hashes in place.
old["doc"]["Audit"].map { |audit| audit["Properties"]["Property"].map { |prop| prop['NewValue'] = prop['NewValue'].first } }
old
=> {"#version"=>"1", "#timestamp"=>"2016-04-11T15:35:07.372Z", "host"=>"YUSUF-PC", "command"=>"nana", "doc"=>{"TotalResults"=>"1892", "Audit"=>[{"Id"=>"2260167", "Action"=>"UPDATE", "ParentId"=>"30612", "ParentType"=>"defect", "Time"=>"2016-01-04 08:27:59", "User"=>"nana", "Properties"=>{"Property"=>[{"Label"=>"Statut", "Name"=>"status", "NewValue"=>"En cours"}, {"Label"=>"Affecté à", "Name"=>"owner", "NewValue"=>"nana"}, {"Label"=>"Priorité", "Name"=>"severity", "NewValue"=>"nana"}]}}, {"Id"=>"2260168", "Action"=>"UPDATE", "ParentId"=>"30612", "ParentType"=>"defect", "Time"=>"2016-01-04 09:45:33", "User"=>"nana", "Properties"=>{"Property"=>[{"Label"=>"Affecté à", "Name"=>"owner", "NewValue"=>"nana", "OldValue"=>["nana"]}]}}]}}
I am working with a large data set that contains multiple arrays of hashes, all of which have a key-value pair in common ("date" and the date value) as the first element of each hash.
The arrays of hashes I need to parse (#data["snapshot"]) are in the following format. Note that #data["snapshot"][0], #data["snapshot"][1], and #data["snapshot"][2] are in an identical format with identical dates, but their totals differ. In the resulting hash I need a key-value pair that identifies where the data came from.
#data["snapshot"][0] is as follows:
[{"date"=>"1455672010", "total"=>"**817**", "I"=>"1", "L"=>"3", "M"=>"62", "H"=>"5", "C"=>"0"},
{"date"=>"1455595298", "total"=>"**40**", "I"=>"8", "L"=>"5", "M"=>"562", "H"=>"125", "C"=>"0"},
{"date"=>"1455336016", "total"=>"**555**", "I"=>"10", "L"=>"1", "M"=>"93", "H"=>"121", "C"=>"0"}]
#data["snapshot"][1] is as follows:
[{"date"=>"1455672010", "total"=>"**70**", "I"=>"1", "L"=>"9", "M"=>"56", "H"=>"25", "C"=>"0"},
{"date"=>"1455595298", "total"=>"**54**", "I"=>"8", "L"=>"2", "M"=>"5", "H"=>"5", "C"=>"0"},
{"date"=>"1455336016", "total"=>"**25**", "I"=>"0", "L"=>"9", "M"=>"93", "H"=>"12", "C"=>"0"}]
#data["snapshot"][2] is as follows:
[{"date"=>"1455672010", "total"=>"**70**", "I"=>"12", "L"=>"5", "M"=>"5662", "H"=>"125", "C"=>"0"},
{"date"=>"1455595298", "total"=>"**43212**", "I"=>"56", "L"=>"6", "M"=>"5662", "H"=>"125", "C"=>"0"},
{"date"=>"1455336016", "total"=>"**55525**", "I"=>"100", "L"=>"19", "M"=>"5593", "H"=>"121", "C"=>"0"}]
My Question Is Ultimately:
How do I convert (flatten?) the three existing arrays of hashes (#data["snapshot"][0], #data["snapshot"][1], and #data["snapshot"][2]) into a single array of hashes in the following format?
[{"date"=>"1455672010", "CameFromDataSource0"=>"817", "CameFromDataSource1"=>"70", "CameFromDataSource2"=>"70"},
{"date"=>"1455595298", "CameFromDataSource0"=>"40", "CameFromDataSource1"=>"54", "CameFromDataSource2"=>"43212"},
{"date"=>"1455336016", "CameFromDataSource0"=>"555", "CameFromDataSource1"=>"25", "CameFromDataSource2"=>"55525"}]
TL;DR
snapshots.each_with_object(Hash.new {|hsh, date| hsh[date] = { "date" => date } })
.with_index do |(snapshot, hsh), i|
snapshot["data"].each {|datum| hsh[datum["date"]]["data#{i}"] = datum["total"] }
end.values
How it works
I'll break it down so you see how each part works. Here's our data (extraneous keys elided for clarity):
snapshots = [
{ "dataSourceID" => "152970",
"data" => [ { "date" => "1455672010", "total" => "817" },
{ "date" => "1455595298", "total" => "40" },
{ "date" => "1455336016", "total" => "555" } ]
},
{ "dataSourceID" => "33151",
"data" => [ { "date" => "1455672010", "total" => "70" },
{ "date" => "1455595298", "total" => "54" },
{ "date" => "1455336016", "total" => "25" } ]
},
{ "dataSourceID" => "52165",
"data" => [ { "date" => "1455672010", "total" => "70" },
{ "date" => "1455595298", "total" => "43212" },
{ "date" => "1455336016", "total" => "55525" } ]
}
]
Most of the magic is here:
result_hash = Hash.new {|hsh, date| hsh[date] = { "date" => date } }
Here we're using the Hash's default proc to automatically initialize new keys in the following way:
result_hash = Hash.new {|hsh, date| hsh[date] = { "date" => date } }
p result_hash["1455672010"]
# => { "date" => "1455672010" }
p result_hash
# => { "1455672010" => { "date" => "1455672010" } }
Simply accessing result_hash[foo] creates the hash { "date" => foo } and assigns it to result_hash[foo]. This enables the following:
result_hash["1455672010"]["data0"] = "817"
p result_hash
# => { "1455672010" => { "date" => "1455672010", "data0" => "817" } }
Magic!
Now suppose we have the following data:
data = [ { "date" => "1455672010", "total" => "817" },
{ "date" => "1455595298", "total" => "40" },
{ "date" => "1455336016", "total" => "555" } ]
Using our magic result_hash, we can do this:
data.each do |datum|
result_hash[datum["date"]]["data0"] = datum["total"]
end
p result_hash
# => { "1455672010" => { "date" => "1455672010", "data0" => "817" },
# "1455595298" => { "date" => "1455595298", "data0" => "40" },
# "1455336016" => { "date" => "1455336016", "data0" => "555" } }
See where I'm going with this? Here's all of our data, finally:
snapshots = [
{ "dataSourceID" => "152970",
"data" => [ { "date" => "1455672010", "total" => "817" },
{ "date" => "1455595298", "total" => "40" },
{ "date" => "1455336016", "total" => "555" } ]
},
{ "dataSourceID" => "33151",
"data" => [ { "date" => "1455672010", "total" => "70" },
{ "date" => "1455595298", "total" => "54" },
{ "date" => "1455336016", "total" => "25" } ]
},
{ "dataSourceID" => "52165",
"data" => [ { "date" => "1455672010", "total" => "70" },
{ "date" => "1455595298", "total" => "43212" },
{ "date" => "1455336016", "total" => "55525" } ]
}
]
Instead of hard-coding "data0", we can iterate over the snapshots hashes using each_with_index and build that key ("data0", then "data1", and so on) for each iteration. Inside that loop we can do exactly what we did above but with the "data" array from each snapshots hash:
result_hash = Hash.new {|hsh, date| hsh[date] = { "date" => date } }
snapshots.each_with_index do |snapshot, i|
data_key = "data#{i}"
snapshot["data"].each do |datum|
date = datum["date"]
result_hash[date][data_key] = datum["total"]
end
end
p result_hash.values
# => [ { "date" => "1455672010", "data0" => "817", "data1" => "70", "data2" => "70" },
# { "date" => "1455595298", "data0" => "40", "data1" => "54", "data2" => "43212" },
# { "date" => "1455336016", "data0" => "555", "data1" => "25", "data2" => "55525" } ]
Of course, this can be condensed some, which I've done in TL;DR above.
This is one way to do it.
Code
def convert(data)
data.each_with_object({}) { |a,h|
a.each { |g| h.update(g["date"]=>[g["total"][/\d+/]]) { |_,o,n| o+n } } }.
map { |date, arr| arr.each_with_index.with_object({"date"=>date}) { |(e,i),h|
h["key#{i}"] = e } }
end
Example
convert(data)
#=> [{"date"=>"1455672010", "key0"=>"817", "key1"=>"70", "key2"=>"70"},
# {"date"=>"1455595298", "key0"=>"40", "key1"=>"54", "key2"=>"43212"},
# {"date"=>"1455336016", "key0"=>"555", "key1"=>"25", "key2"=>"55525"}]
Two steps
You can see that I've done this in two steps. First construct a hash:
f = data.each_with_object({}) { |a,h| a.each { |g|
h.update(g["date"]=>[g["total"][/\d+/]]) { |_,o,n| o+n } } }
#=> {"1455672010"=>["817", "70", "70"],
# "1455595298"=>["40", "54", "43212"],
# "1455336016"=>["555", "25", "55525"]}
Here I have used the form of Hash#update (aka merge!) that employs a block ({ |_,o,n| o+n }) to determine the values of keys that are present in both hashes being merged.
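For illustration, a minimal standalone example of that form of update (the keys and values here are made up):
h = { "1455672010" => ["817"] }
h.update("1455672010" => ["70"], "1455595298" => ["40"]) { |_key, old, new| old + new }
#=> {"1455672010"=>["817", "70"], "1455595298"=>["40"]}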
Then convert the hash to the desired format:
f.map { |date, arr| arr.each_with_index.with_object({"date"=>date}) { |(e,i),h|
h["key#{i}"] = e } }
#=> [{"date"=>"1455672010", "key0"=>"817", "key1"=>"70", "key2"=>"70"},
# {"date"=>"1455595298", "key0"=>"40", "key1"=>"54", "key2"=>"43212"},
# {"date"=>"1455336016", "key0"=>"555", "key1"=>"25", "key2"=>"55525"}]
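A hypothetical usage example with the question's data, assuming #data["snapshot"] (presumably an instance variable such as @data) holds the three arrays shown in the question; rename "key#{i}" inside convert to "CameFromDataSource#{i}" if you want exactly the keys the question asks for:
ar_out = convert(@data["snapshot"])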
I have the following query:
$queryDefinition = [
'query' => [
'bool' => [
'must' => [
[
'query_string' => [
'default_field' => '_all',
'query' => $term
]
]
],
'must_not' => [],
'should' => []
],
],
//'filter' => [
// 'limit' => ['value' => 3],
//],
'from' => 0,
'size' => 50,
'sort' => [],
'facets' => [
'types' => [
'terms' => ['field' => '_type']
]
]
];
In the index we have 5 types, and I would like to show only 3 results from each type, for autocomplete. When I set the filter limit, only the results from the first type are limited; for the other types I get all the results.
How can I do this?
Thanks
{
    "aggregations": {
        "types": {
            "terms": {
                "field": "_type"
            },
            "aggregations": {
                "hits": {
                    "top_hits": {
                        "size": 3
                    }
                }
            }
        }
    }
}
If you're using Elasticsearch 1.4.0 (or later), you can run a terms aggregation on the "_type" field with a top_hits aggregation as a sub-aggregation and set its size to 3 (in your case). In your $queryDefinition this goes under an "aggs"/"aggregations" key, replacing the facets section.
Hope that helps.