-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathchef-span.rb
369 lines (344 loc) · 14.1 KB
/
chef-span.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
#!/usr/bin/ruby
#
# Generated by Chef for <%= node[:fqdn] %>
#
# Any local changes will be deleted.
#
# Copyright 2013, Edmunds.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# A script which queries chef server API(s) and builds an output file with nodes and edges.
# The nodes and edges are a representation of the connectivity visible through chef and
# can be visualized with graphing software.
#
# For usage see '-h' output.
#
require "rubygems";
#require "json";
require "optparse"
require "pp";
require "resolv";
require 'chef/config'
require 'chef/log'
require 'chef/rest'
# Parse command-line options.
#
# Populates:
#   options[:inputfile]              - optional CSV of manual nodes ("" when absent)
#   options[:outputfile]             - GDF output path (default /tmp/output.gdf)
#   options[:max_environments_count] - cap on environments processed per server
#   options[:apis]                   - {chef_server_url => {:client =>, :keyfile =>}}
options = {}
optparse = OptionParser.new do |opts|
  opts.banner = "Usage: #{File.basename($0)} [options] -a <chef_server_url1,client1,keyfile1,[...]>"
  options[:inputfile] = ""
  options[:outputfile] = "/tmp/output.gdf"
  # arbitrarily high number for max environments - use this to limit queries during debugging
  options[:max_environments_count] = 1000
  opts.on('-h', '--help', 'This help print out.') do
    puts opts
    exit
  end
  opts.on('-m', '--max_environments COUNT', 'Max number of environments to process') do |c|
    if c.to_i < 1
      puts "The max number of environments should be at least 1 or greater."
      exit
    end
    options[:max_environments_count] = c.to_i
  end
  opts.on('-a', '--api chef_server_url,client,keyfile', Array, 'Required components for querying the chef server API') do |l|
    if l.length % 3 != 0
      puts "The number of comma-separated elements under -a (--api) should be a multiple of three."
      puts "Example: \"-a https://chef.server.url:444,chef_client_name,/home/user/.chef/key.pem\""
      exit
    end
    # Accumulate rather than reassign: the original reset the hash on every
    # -a flag, so only the last -a on the command line survived.
    options[:apis] ||= {}
    l.each_slice(3) do |chef_server_url, client, keyfile|
      # Make a feeble attempt to confirm the chef server url is reasonable
      unless chef_server_url.include?("http")
        puts "The supplied chef server url doesn't look right - there's no \"http\" in it."
        puts "Example: \"-a https://chef.server.url:444,chef_client_name,/home/user/.chef/key.pem\""
        exit
      end
      options[:apis][chef_server_url] = { :client => client, :keyfile => keyfile }
    end
  end
  opts.on('-o', '--output FILE', 'Output file for graph content.') do |f|
    options[:outputfile] = f
  end
  opts.on('-i', '--input FILE', 'Input file for manual nodes.') do |f|
    if File.file?(f)
      options[:inputfile] = f
    else
      puts "Input file \"#{f}\" doesn't exist."
      exit
    end
  end
end
optparse.parse!
# Required CLI parameters
unless options[:apis]
  puts "API parameters absent."
  puts "Example: \"-a https://chef.server.url:444,chef_client_name,/home/user/.chef/key.pem\""
  puts optparse
  exit
end

# Pull the parsed options into plainly-named locals.
max_number_of_environments = options[:max_environments_count]
outputfile = options[:outputfile]
manual_nodes = options[:inputfile]

# Builds a hash that autovivifies: reading a missing key installs a nested
# hash with the same default proc, so deep writes like h[a][b][c] = v work
# without any intermediate setup.
autoviv = lambda { Hash.new { |h, k| h[k] = Hash.new(&h.default_proc) } }

# a collection of end points
nodes = autoviv.call
# a collection of end points which were inferred through data in the original end points
inferred_nodes = autoviv.call
# a mapping of ips to end points
ips = {}
# a collection of edges between end points
edges = autoviv.call
# Go through each chef server API end point and harvest node data into
# `nodes` (keyed by fqdn) and `ips` (ip -> fqdn lookup).
options[:apis].each do |url, auth|
  chef_server_url = url
  client_name = auth[:client]
  signing_key_filename = auth[:keyfile]
  # Initialize the rest object
  rest = Chef::REST.new(chef_server_url, client_name, signing_key_filename)
  # Blank create_id so JSON documents come back as plain hashes instead of
  # being inflated into Ruby objects.
  JSON.create_id = ""
  # We need to loop through the environments to avoid crushing the chef server API
  puts "Pulling environment list from chef server: #{chef_server_url}\n"
  environments = rest.get_rest("/environments")
  environments_counter = 0
  environments.keys.each do |environment|
    break if environments_counter >= max_number_of_environments
    if environment.include?("sandbox") or environment.include?("_default")
      puts "Skipping environment: #{environment}\n"
      next
    end
    puts "Processing environment: #{environment}\n"
    environments_counter += 1
    query = rest.get_rest("/search/node?q=chef_environment:#{environment}")
    # populate the collection of end points
    query["rows"].each do |node|
      # A node that has never converged has no ohai data worth graphing.
      if node["automatic"].empty?
        broken_node = node["name"]
        puts "WARNING: node #{broken_node} has no automatic attributes ... skipping\n"
        next
      end
      node_id = node["automatic"]["fqdn"]
      #puts "Processing node: #{node_id}\n"
      entry = nodes["#{node_id}"]
      entry[:ohai_time_epoch] = node["automatic"]["ohai_time"]
      entry[:ohai_time_human] = Time.at(node["automatic"]["ohai_time"].to_i)
      entry[:chef_environment] = node["chef_environment"]
      entry[:ipaddress] = node["automatic"]["ipaddress"]
      entry[:fqdn] = node_id
      ips["#{node["automatic"]["ipaddress"]}"] = node_id
      # TO DEVELOP - need to harvest any other ips on this node
      # Flatten the run list into a pipe-delimited string (trailing "|"
      # retained for compatibility with existing graph files).
      run_list = ""
      node["run_list"].each { |rl_entry| run_list << "#{rl_entry}|" }
      entry[:run_list] = run_list
      entry[:platform_version] = node["automatic"]["platform_version"]
      unless node["default"]["environment"].nil? or node["default"]["environment"]["data_center"].nil?
        entry[:data_center] = node["default"]["environment"]["data_center"]
      end
      unless node["automatic"]["dmi"].nil? or node["automatic"]["dmi"]["system"].nil?
        entry[:manufacturer] = node["automatic"]["dmi"]["system"]["manufacturer"]
        entry[:product_name] = node["automatic"]["dmi"]["system"]["product_name"]
      end
      entry[:data_source] = chef_server_url
      # get NFS filesystems. Guard against a missing "filesystem" attribute
      # (mirrors the has_key? guard already used for "connections" below;
      # the original raised NoMethodError on nodes lacking it).
      if node["automatic"].has_key?("filesystem")
        node["automatic"]["filesystem"].each do |fs, v|
          next unless v["fs_type"].eql?("nfs")
          # The NFS server address hides in the mount options as "addr=x.x.x.x"
          nfs_ip = ""
          v["mount_options"].each do |option|
            nfs_ip = option.gsub("addr=", "") if option =~ /^addr=/
          end
          entry[:nfs_fs]["#{nfs_ip}"][:local_path] = v["mount"]
          entry[:nfs_fs]["#{nfs_ip}"][:filer] = fs.split(":").first
          entry[:nfs_fs]["#{nfs_ip}"][:volume] = fs.split(":").last
        end
      end
      # TO DEVELOP - get ARP entries
      # get netstat entries (published by a custom ohai plugin, presumably -
      # not a stock attribute)
      if node["automatic"].has_key?("connections")
        node["automatic"]["connections"].each do |con, v|
          con_entry = entry[:connections]["#{con}"]
          %w[foreignip proto lastseen locallistenport remoteport count localip].each do |field|
            con_entry[field.to_sym] = v[field]
          end
        end
      end
    end
  end
end
#puts "Finished querying chef servers.\n"
# pull in manual nodes from file
# assume csv - ignore lines that start with # (format: name,ipaddress)
# TO DEVELOP - need to enhance with some error checking
unless manual_nodes.empty?
  puts "Loading manual nodes from #{manual_nodes}"
  # File.foreach closes the handle when iteration finishes; the original
  # File.open(...).each_line leaked an open File object.
  File.foreach(manual_nodes) do |line|
    next if line =~ /^#/
    line.chomp!
    next if line.strip.empty?
    name, ipaddress = line.split(",")
    name = name.to_s
    nodes[name][:ipaddress] = ipaddress
    nodes[name][:fqdn] = name
    nodes[name][:data_source] = "manual"
    ips[ipaddress.to_s] = name
  end
end
#puts "Finished loading manual nodes from #{manual_nodes}\n"
#pp nodes
# Make a pass through each of the end points and look for connections to unknown end points
# When an unknown end point is found (unknown IP) then add it to an interim hash
#
# Resolve an unknown IP to a hostname (falling back to "UNKNOWN-<ip>") and
# register it as an inferred end point. Shared by the nfs and netstat scans
# below, which previously duplicated this logic verbatim.
register_inferred = lambda do |ip|
  begin
    new_node = Resolv.getname(ip)
  rescue StandardError => e
    # Narrowed from `rescue Exception`, which would also swallow
    # SignalException / SystemExit. DNS failures raise Resolv::ResolvError,
    # a StandardError.
    p "WARNING: failed to perform DNS lookup against #{ip} - #{e}"
  end
  # new_node is nil when the lookup raised - synthesize a placeholder name.
  new_node = "UNKNOWN-" + ip if new_node.nil?
  inferred_nodes[new_node][:ipaddress] = ip
  inferred_nodes[new_node][:fqdn] = new_node
  inferred_nodes[new_node][:data_source] = "inferred"
  ips[ip] = inferred_nodes[new_node][:fqdn]
end
nodes.each do |k, v|
  # edges based on nfs filesystems
  if v.has_key?(:nfs_fs)
    v[:nfs_fs].each_key do |ip|
      # if we find a mystery IP, create a dummy node for this end point
      register_inferred.call(ip) unless ips.has_key?(ip)
    end
  end
  # edges based on node connections
  if v.has_key?(:connections)
    v[:connections].each_value do |val|
      ip = val[:foreignip]
      # if we find a mystery IP, create a dummy node for this end point
      register_inferred.call(ip) unless ips.has_key?(ip)
    end
  end
end
#puts "Finished building interim hash for inferred IPs.\n"
# Add the inferred nodes to the original set of nodes
nodes.merge!(inferred_nodes)
#puts "Finished merging inferred nodes with found node set.\n"
# Make a pass through the now-complete collection of end points,
# in order to build the collection of edges. The order of which
# type of connection to process first (nfs, tcp/netstat, arp, etc)
# matters, because they will overwrite each other. Set the order so
# that the later connections are the richer data sets
# i.e. nfs should go after tcp connections, because nfs connections
# better data to draw upon and should overwrite the weaker 'tcp 2049' entry
nodes.each do |k, v|
  # edges based on node connections
  if v.has_key?(:connections)
    v[:connections].each_value do |val|
      ip = val[:foreignip]
      # Hoist the autovivified edge record once instead of re-looking it up
      # on every assignment.
      edge = edges["#{k}_#{ips[ip]}"]
      # "NA" in remoteport/locallistenport marks which side was listening.
      if val[:remoteport].eql?("NA")
        # remote side connected to us
        edge[:directed] = "true"
        edge[:target] = k
        edge[:source] = ips[ip]
      elsif val[:locallistenport].eql?("NA")
        # we connected to the remote side
        edge[:directed] = "true"
        edge[:target] = ips[ip]
        edge[:source] = k
      else
        # direction unknown - record as undirected
        edge[:directed] = "false"
        edge[:target] = k
        edge[:source] = ips[ip]
      end
      if val[:locallistenport].nil? or val[:proto].nil? or val[:remoteport].nil?
        edge[:label] = "bogus"
      elsif val[:locallistenport].eql?("NA")
        edge[:label] = val[:remoteport]
      elsif val[:remoteport].eql?("NA")
        edge[:label] = val[:locallistenport]
      else
        edge[:label] = "L: " + val[:locallistenport] + " " + val[:proto] + " R: " + val[:remoteport]
      end
      edge[:presence] = val[:count]
    end
  end
  # edges based on nfs filesystems (later pass wins: richer than a bare
  # 'tcp 2049' entry from netstat)
  if v.has_key?(:nfs_fs)
    v[:nfs_fs].each do |ip, config|
      edge = edges["#{k}_#{ips[ip]}"]
      edge[:directed] = "true"
      edge[:source] = k
      edge[:target] = ips[ip]
      edge[:label] = "nfs - " + config[:local_path]
      edge[:presence] = "100"
    end
  end
end
#puts "Finished building edges.\n"
#pp nodes
#pp inferred_nodes
#pp ips
#pp edges
# output the collection of nodes and edges in GDF format
# TO DEVELOP - error handling for failures to operate on the output file
#
# Render a node attribute for output: nil or empty values become "unknown".
# This also covers the empty hash an autovivified miss produces, which the
# original's .nil? checks let through (manual/inferred nodes printed a
# literal "{}" for the ohai_time columns).
fmt = lambda do |value|
  if value.nil? or (value.respond_to?(:empty?) and value.empty?)
    "unknown"
  else
    value.to_s
  end
end
# Block form of File.open guarantees the handle is flushed and closed even
# if an exception is raised mid-write.
File.open(outputfile, 'w') do |output|
  output << "nodedef>name VARCHAR,label VARCHAR,data_source VARCHAR, ipaddress VARCHAR, environment VARCHAR, run_list VARCHAR, platform_version VARCHAR, manufacturer VARCHAR, product_name VARCHAR, device_class VARCHAR, data_center VARCHAR, ohai_time_human VARCHAR, ohai_time_epoch DOUBLE\n"
  nodes.each do |k, v|
    if v[:data_source].nil? or v[:ipaddress].nil?
      puts "WARNING: error processing node for " + k + " ... skipping."
    else
      fields = [k, k, v[:data_source], v[:ipaddress],
                fmt.call(v[:chef_environment]),
                fmt.call(v[:run_list]),
                fmt.call(v[:platform_version]),
                fmt.call(v[:manufacturer]),
                fmt.call(v[:product_name]),
                fmt.call(v[:device_class]),
                fmt.call(v[:data_center]),
                fmt.call(v[:ohai_time_human]),
                fmt.call(v[:ohai_time_epoch])]
      output << fields.join(",") + "\n"
    end
  end
  # output the collection of edges
  output << "edgedef>node1 VARCHAR, node2 VARCHAR, directed BOOLEAN, label VARCHAR, weight DOUBLE, presence DOUBLE\n"
  edges.each do |k, v|
    # has_key? (rather than v[field].nil?) avoids autovivifying a missing
    # field into an empty hash - the original's nil checks could never fire,
    # so an incomplete edge crashed the concatenation below instead of
    # being skipped.
    if [:source, :target, :directed, :label, :presence].any? { |f| !v.has_key?(f) || v[f].nil? }
      puts "WARNING: error processing edge for " + k + " ... skipping."
    else
      output << v[:source] + "," + v[:target] + "," + v[:directed] + "," + v[:label] + ",1.5," + v[:presence].to_s + "\n"
    end
  end
end
puts "\n\nWriting out graph definition to #{outputfile}"