Ruby hash of hash

I need to have a hash to collect results, for example:
results = Hash.new()
results['127.0.0.1'] = Hash.new()
results['127.0.0.2'] = Hash.new()
results['127.0.0.1']['port'] = '80'
results['127.0.0.2']['port'] = '80'
results['127.0.0.1']['ver'] = 'abc'
results['127.0.0.2']['ver'] = 'def'
It seems to work fine, but now how can I show the results? :)
I would like to have:
ip: 127.0.0.1
port: 80
ver: abc
ip: 127.0.0.2
port: 80
ver: def
Thank you very much!

results.each do |k, v|
  puts "IP: #{k}, Port: #{v['port']}, Ver: #{v['ver']}"
end

A better way to define the hash:
result = {
  '127.0.0.1' => {
    port: 80,
    ver: 'abc'
  },
  '127.0.0.2' => {
    port: 80,
    ver: 'def'
  }
}
and then:
result.each do |key, value|
  puts "ip: #{key}"
  value.each { |k, v| puts "\t#{k}: #{v}" }
end
This approach will also work if you add extra keys to the hash.
But if you only need it for debugging, read about awesome_print.
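For instance, a minimal sketch assuming the awesome_print gem is installed (it is not part of the standard library):
require 'awesome_print'

# ap pretty-prints the nested hash with indentation, one key per line
ap result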


How to deep_transform_values with keys in Ruby?

Active Support's deep_transform_values recursively transforms all values of a hash. However, is there a similar method that would give access to each value's key while transforming?
I'd like to be able to do the following:
keys_not_to_transform = ['id', 'count']
response = { result: 'ok', errors: [], data: { id: '123', price: '100.0', quotes: ['1.0', '2.0'] }, count: 10 }
response.deep_transform_values! do |key, value|
  # Use the value's key to help decide what to do
  next value if keys_not_to_transform.any?(key.to_s)
  s = value.to_s
  if s.present? && /\A[+-]?\d+(\.\d+)?\z/.match?(s)
    BigDecimal(s)
  else
    value
  end
end
# Expected result
# => {:result=>"ok", :errors=>[], :data=>{:id=>"123", :price=>0.1e3, :quotes=>[0.1e1, 0.2e1]}, :count=>10}
Note that we are not interested in transforming the key itself, just having it on hand while transforming the corresponding values.
You could use Hash#deep_merge! (provided by ActiveSupport) like so:
keys_not_to_transform = ['id', 'count']

transform_value = lambda do |value|
  s = value.to_s
  if s.present? && /\A[+-]?\d+(\.\d+)?\z/.match?(s)
    BigDecimal(s)
  else
    value
  end
end

transform = Proc.new do |key, value|
  if keys_not_to_transform.include? key.to_s
    value
  elsif value.is_a?(Array)
    value.map! do |v|
      v.is_a?(Hash) ? v.deep_merge!(v, &transform) : transform_value.(v)
    end
  else
    transform_value.(value)
  end
end
response = { result: 'ok', errors: [], data: { id: '123', price: '100.0', quotes: ['1.0', '2.0'], other: [{id: '124', price: '17.0'}] }, count: 10 }
response.deep_merge!(response, &transform)
Merging the hash with itself makes every key collide, so the block is called for each key with its value, which is what puts the key in scope during the transformation. This outputs:
#=>{:result=>"ok", :errors=>[], :data=>{:id=>"123", :price=>0.1e3, :quotes=>[0.1e1, 0.2e1], :other=>[{:id=>"124", :price=>0.17e2}]}, :count=>10}
I'd just implement the necessary transformation logic with plain old Ruby and a bit of recursion, no external dependencies needed. For example:
require 'bigdecimal'

def transform(hash, ignore_keys: [])
  hash.each_with_object({}) do |(key, value), result|
    if value.is_a?(Hash)
      result[key] = transform(value, ignore_keys: ignore_keys)
    elsif ignore_keys.include?(key.to_s)
      result[key] = value
    elsif value.to_s =~ /\A[+-]?\d+(\.\d+)?\z/
      result[key] = BigDecimal(value)
    else
      result[key] = value
    end
  end
end
keys_not_to_transform = %w[id count]
response = { result: 'ok', errors: [], data: { id: '123', price: '100.0' }, count: 10 }
transform(response, ignore_keys: keys_not_to_transform)
# => {:result=>"ok", :errors=>[], :data=>{:id=>"123", :price=>#<BigDecimal:5566613bb128,'0.1E3',9(18)>}, :count=>10}
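Note that this plain-Ruby transform doesn't descend into arrays, so the quotes key from the original example would be left as strings. A minimal sketch of a variant that also recurses into arrays (deep_transform is a hypothetical name, not part of any library):
require 'bigdecimal'

NUMERIC = /\A[+-]?\d+(\.\d+)?\z/

def deep_transform(value, ignore_keys: [])
  case value
  when Hash
    value.each_with_object({}) do |(k, v), result|
      # keep ignored keys verbatim, recurse into everything else
      result[k] = ignore_keys.include?(k.to_s) ? v : deep_transform(v, ignore_keys: ignore_keys)
    end
  when Array
    value.map { |v| deep_transform(v, ignore_keys: ignore_keys) }
  else
    value.to_s.match?(NUMERIC) ? BigDecimal(value.to_s) : value
  end
end

response = { result: 'ok', errors: [], data: { id: '123', price: '100.0', quotes: ['1.0', '2.0'] }, count: 10 }
deep_transform(response, ignore_keys: %w[id count])
# => {:result=>"ok", :errors=>[], :data=>{:id=>"123", :price=>0.1e3, :quotes=>[0.1e1, 0.2e1]}, :count=>10}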

Serialize an array of hashes

I have an array of hashes:
records = [
  {
    ID: 'BOATY',
    Name: 'McBoatface, Boaty'
  },
  {
    ID: 'TRAINY',
    Name: 'McTrainface, Trainy'
  }
]
I'm trying to combine them into an array of strings:
["ID,BOATY","Name,McBoatface, Boaty","ID,TRAINY","Name,McTrainface, Trainy"]
This doesn't seem to do anything:
irb> records.collect{|r| r.each{|k,v| "\"#{k},#{v}\"" }}
#=> [{:ID=>"BOATY", :Name=>"McBoatface, Boaty"}, {:ID=>"TRAINY", :Name=>"McTrainface, Trainy"}]
** edit **
Formatting (i.e. ["Key0,Value0","Key1,Value1",...]) is required to match a vendor's interface.
** /edit **
What am I missing?
# each returns its receiver and discards the block's results, which is why the
# original attempt appeared to do nothing; build the key/value pairs with to_a instead:
records.flat_map(&:to_a).map { |a| a.join(',') }
#=> ["ID,BOATY", "Name,McBoatface, Boaty", "ID,TRAINY", "Name,McTrainface, Trainy"]
records = [
  {
    ID: 'BOATY',
    Name: 'McBoatface, Boaty'
  },
  {
    ID: 'TRAINY',
    Name: 'McTrainface, Trainy'
  }
]

# straightforward code
result = []
records.each do |hash|
  hash.each do |key, value|
    result << key.to_s
    result << value
  end
end
puts result.inspect

# a rubyish way (probably less efficient; I haven't benchmarked it)
puts records.map(&:to_a).flatten.map(&:to_s).inspect
Hope it helps.
li = []
records.each do |rec|
  rec.each do |k, v|
    li << "#{k},#{v}"
  end
end
print li
["ID,BOATY", "Name,McBoatface, Boaty", "ID,TRAINY", "Name,McTrainface, Trainy"]
You sure you wanna do it this way?
Check out Marshal. Or JSON.
You could even do it this stupid way using Hash#inspect and eval:
serialized_hashes = records.map(&:inspect) # ["{ID: 'Boaty'...", ...]
unserialized = serialized_hashes.map { |s| eval(s) }
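If the exact vendor format isn't required, a minimal round-trip sketch using the standard library's json (parsing returns string keys unless symbolize_names is passed):
require 'json'

serialized = records.to_json
restored = JSON.parse(serialized, symbolize_names: true)
restored == records #=> true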

Best way to parse json in Ruby for the format given

For my Rails app, a SQL query result is received in the format below.
@data = JSON.parse(request, symbolize_names: true)[:data]
# @data sample
[{"time":"2017-11-14","A":0,"B":0,"C":0,"D":0,"E":0},
{"time":"2017-11-15","A":0,"B":0,"C":0,"D":0,"E":0},
{"time":"2017-11-16","A":2,"B":1,"C":1,"D":0,"E":1},
{"time":"2017-11-17","A":0,"B":0,"C":1,"D":0,"E":1},
{"time":"2017-11-20","A":0,"B":0,"C":0,"D":0,"E":0},
{"time":"2017-11-21","A":6,"B":17,"C":0,"D":0,"E":1}]
But I want the data in the format
[{"name":"A","data":{"2017-11-16":2,"2017-11-21":6}},
{"name":"B","data":{"2017-11-16":1,"2017-11-21":17}},
{"name":"C","data":{"2017-11-16":1,"2017-11-17":1}},
{"name":"D","data":{}},
{"name":"E","data":{"2017-11-16":1,"2017-11-17":1,"2017-11-21":1}}]
What is the best way to parse this in Ruby?
I tried using @data.each, but it got lengthy.
I am totally new to Ruby. Any help would be appreciated.
Oddly specific question, but kind of an interesting problem, so I took a stab at it. If this is coming from a SQL database, I feel like the better solution would be to have SQL format the data for you rather than transforming it in Ruby.
@data = JSON.parse(request, symbolize_names: true)[:data]

intermediate = {}
@data.each do |row|
  time = row.delete(:time)
  row.each do |key, val|
    intermediate[key] ||= {}
    intermediate[key][time] = val if val > 0
  end
end

transformed = []
intermediate.each do |key, val|
  transformed << { name: key.to_s, data: val }
end
At the end of this, transformed will contain the transformed data. Horrible variable names, and I hate having to do this in two passes, but I got something working and figured I'd share in case it's helpful.
I agree with csexton that a better query to source the data would be the ultimate solution here.
Anyway, here's a solution that's similar to csexton's but uses nested default Hash procs to simplify some of the operations:
def pivot(arr, column)
  results = Hash.new do |hash, key|
    hash[key] = Hash.new(0)
  end
  arr.each do |hash|
    data = hash.dup
    pivot = data.delete(column)
    data.each_pair do |name, value|
      results[name][pivot] += value
    end
  end
  results.map do |name, data|
    {
      name: name.to_s,
      data: data.delete_if { |_, sum| sum.zero? }
    }
  end
end
pivot(@data, :time) # => [{:name=>"A", :data=>{"2017-11-16"=>2, "2017-11-21"=>6}}, ..
Here's a more "Ruby-ish" (depending on who you ask) solution:
def pivot(arr, column)
  arr
    .flat_map do |hash|
      hash
        .to_a
        .delete_if { |key, _| key == column }
        .map! { |data| data << hash[column] }
    end
    .group_by(&:shift)
    .map do |name, outer|
      {
        name: name.to_s,
        data: outer
          .group_by(&:last)
          .transform_values! { |inner| inner.sum(&:first) }
          .delete_if { |_, sum| sum.zero? }
      }
    end
end
pivot(@data, :time) # => [{:name=>"A", :data=>{"2017-11-16"=>2, "2017-11-21"=>6}}, ..
Quite frankly, I find it pretty unreadable and I wouldn't want to support it. :)
arr = [{"time":"2017-11-14","A":0,"B":0,"C":0,"D":0,"E":0},
       {"time":"2017-11-15","A":0,"B":0,"C":0,"D":0,"E":0},
       {"time":"2017-11-16","A":2,"B":1,"C":1,"D":0,"E":1},
       {"time":"2017-11-17","A":0,"B":0,"C":1,"D":0,"E":1},
       {"time":"2017-11-20","A":0,"B":0,"C":0,"D":0,"E":0},
       {"time":"2017-11-21","A":6,"B":17,"C":0,"D":0,"E":1}]

(arr.first.keys - [:time]).map do |key|
  { name: key.to_s,
    data: arr.select { |h| h[key] > 0 }.
              each_with_object({}) { |h, g| g.update(h[:time] => h[key]) } }
end
#=> [{:name=>"A", :data=>{"2017-11-16"=>2, "2017-11-21"=>6}},
# {:name=>"B", :data=>{"2017-11-16"=>1, "2017-11-21"=>17}},
# {:name=>"C", :data=>{"2017-11-16"=>1, "2017-11-17"=>1}},
# {:name=>"D", :data=>{}},
# {:name=>"E", :data=>{"2017-11-16"=>1, "2017-11-17"=>1, "2017-11-21"=>1}}]
Note that
arr.first.keys - [:time]
#=> [:A, :B, :C, :D, :E]

How to parse a YAML file?

I have a machine.yml file as follows:
---
machines:
  A:
    ip: ABC
    pass: vass
    user: A
  B:
    ip: XYZ
    pass: grass
    user: B
  C:
    ip: klm
    pass: pass
    user: C
I tried to parse the above file as follows:
machines = YAML.load_file('machine.yml')
machines = machines['machines']

## Iterate through entries in YAML file
machines.each_value do |machines|
  var = [machines["A"]["ip"], machines["A"]["pass"], machines["B"]["ip"], machines["B"]["pass"], machines["C"]["ip"], machines["C"]["pass"]]
  # var should have all the values
end
The "var" should contain all the values as a string. But I am not able execute the above piece as it's throwing errors. How can I parse all the values of YAML separately?
Your code
Since you hardcode the keys you're interested in, you don't need to iterate with each_value:
require 'yaml'

machines = YAML.load_file('machine.yml')['machines']
var = [machines["A"]["ip"], machines["A"]["pass"], machines["B"]["ip"], machines["B"]["pass"], machines["C"]["ip"], machines["C"]["pass"]]
Alternative
First, you should avoid reusing the same variable name (machines) at every level. That's called "shadowing", and it makes your code harder to understand and use.
Depending on what you want to do, each, map or flat_map could help you:
require 'yaml'
yaml_hash = YAML.load_file('machine.yml')
p yaml_hash['machines']
#=> {"A"=>{"ip"=>"ABC", "pass"=>"vass", "user"=>"A"}, "B"=>{"ip"=>"XYZ", "pass"=>"grass", "user"=>"B"}, "C"=>{"ip"=>"klm", "pass"=>"pass", "user"=>"C"}}
yaml_hash['machines'].each do |letter, hash|
  p letter
  p hash['ip']
  p hash['pass']
end
#=>
# "A"
# "ABC"
# "vass"
# "B"
# "XYZ"
# "grass"
# "C"
# "klm"
# "pass"
p yaml_hash['machines'].values
#=> [{"ip"=>"ABC", "pass"=>"vass", "user"=>"A"}, {"ip"=>"XYZ", "pass"=>"grass", "user"=>"B"}, {"ip"=>"klm", "pass"=>"pass", "user"=>"C"}]
p yaml_hash['machines'].values.map { |hash| hash.values_at('ip', 'pass') }
#=> ["ABC", "vass"], ["XYZ", "grass"], ["klm", "pass"]]
p yaml_hash['machines'].values.flat_map { |hash| hash.values_at('ip', 'pass') }
#=> ["ABC", "vass", "XYZ", "grass", "klm", "pass"]

Mapping headers via File.readlines

This works great for me.
require 'csv'

lines = CSV.readlines("log.csv")
a = lines.map { |s| { timestamp: s[0], url: s[1], ip: s[3] } }
puts a
Amended, as it's clearer:
lines = CSV.readlines("log.csv").map do |s|
  { timestamp: s[0], url: s[1], ip: s[3] }
end
puts lines
But I am looking at doing additional filtering using grep, and this fails miserably.
1.9.3-p448 :129 > lines = File.readlines("log.csv").grep(/watch\?v=/)
=> []
1.9.3-p448 :134 > lines.map{|s| {timestamp: s[0], url: s[1], ip: s[3]} }
=> [{:timestamp=>"\"", :url=>"2", :ip=>" "}, {:timestamp=>"\"", :url=>"2", :ip=>" "}
Solution
a = File.readlines('log.csv').grep(/watch\?v=/).map do |s|
  s = s.parse_csv
  { timestamp: s[0], url: s[1], ip: s[3] }
end
puts a
Thanks.
The CSV class adds the parse_csv method to String, so you can parse the file's records one at a time, like this:
lines = File.readlines("log.csv").grep(/watch\?v=/)
a = lines.map { |s| s = s.parse_csv; { timestamp: s[0], url: s[1], ip: s[3] } }
puts a
or, preferably
a = File.readlines('log.csv').grep(/watch\?v=/).map do |s|
  s = s.parse_csv
  { timestamp: s[0], url: s[1], ip: s[3] }
end
puts a
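A hedged alternative that skips the readlines/grep step entirely, assuming the watch?v= token always appears in the URL column (here column 1) and log.csv has no header row:
require 'csv'

a = []
CSV.foreach("log.csv") do |row|
  next unless row[1] =~ /watch\?v=/ # filter on the parsed URL field instead of the raw line
  a << { timestamp: row[0], url: row[1], ip: row[3] }
end
puts a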
