/
UtilLibrary.py
400 lines (324 loc) · 12.6 KB
/
UtilLibrary.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
import requests
from SSHLibrary import SSHLibrary
import robot
import time
import re
import json
import warnings
__author__ = "Basheeruddin Ahmed"
__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
__license__ = "New-style BSD"
__email__ = "syedbahm@cisco.com"
global _cache
def get(url, userId="admin", password="admin"):
    """Issue an HTTP GET against *url* with basic auth and return the response.

    Uses the shared "CLUSTERING_GET" requests session registered at import
    time, requesting an XML representation via the Accept header.
    """
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    headers = {"Accept": "application/xml"}
    session = _cache.switch("CLUSTERING_GET")
    return session.get(url, headers=headers, auth=(userId, password))
def nonprintpost(url, userId, password, data):
    """POST *data* (JSON text) to *url* with basic auth, printing nothing.

    None credentials fall back to "admin"/"admin". Returns the response.
    """
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    userId = "admin" if userId is None else userId
    password = "admin" if password is None else password
    headers = {"Content-Type": "application/json"}
    session = _cache.switch("CLUSTERING_POST")
    return session.post(
        url, data.encode("utf-8"), headers=headers, auth=(userId, password)
    )
def post(url, userId, password, data):
    """POST *data* (JSON text) to *url* with basic auth, printing diagnostics.

    None credentials fall back to "admin"/"admin". The response body is echoed
    when the server reports a 5xx error. Returns the response.
    """
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    userId = "admin" if userId is None else userId
    password = "admin" if password is None else password
    print("post request with url " + url)
    print("post request with data " + data)
    headers = {"Content-Type": "application/json"}
    session = _cache.switch("CLUSTERING_POST")
    resp = session.post(
        url, data.encode("utf-8"), headers=headers, auth=(userId, password)
    )
    print(resp.headers)
    # Surface the server's error page to aid debugging of 5xx failures.
    if resp.status_code >= 500:
        print(resp.text)
    return resp
def delete(url, userId="admin", password="admin"):
    """Helps in making DELETE REST calls.

    :param url: URL of the resource(s) to delete.
    :param userId: Basic-auth user name (defaults to "admin").
    :param password: Basic-auth password (defaults to "admin").
    :return: The requests Response object, so callers can inspect the status.
    """
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    print("delete all resources belonging to url" + url)
    session = _cache.switch("CLUSTERING_DELETE")
    # Previously the response was assigned to an unused local (silenced with
    # noqa) and discarded; return it so failures are observable by callers.
    return session.delete(url, auth=(userId, password))
def Should_Not_Be_Type_None(var):
    """Keyword that fails when *var* is None.

    Raises AssertionError for None; returns "PASS" for any other value.
    """
    if var is not None:
        return "PASS"
    raise AssertionError("the variable passed was type NoneType")
def execute_ssh_command(ip, username, password, command):
    """Run *command* on host *ip* over SSH and return its stdout.

    Logs in with the supplied username/password; the caller provides the full
    command line (e.g. a karaf distribution path such as
    /root/Documents/dist/bin/start). Opens a fresh connection per call and
    closes it before returning.
    """
    print("executing ssh command")
    ssh = SSHLibrary()
    ssh.open_connection(ip)
    ssh.login(username=username, password=password)
    print("login done")
    output = ssh.execute_command(command)
    print("command executed : " + command)
    ssh.close_connection()
    return output
def wait_for_controller_up(ip, port="8181"):
    """Poll the controller's RESTCONF config modules endpoint until it is up.

    :param ip: Controller host address.
    :param port: RESTCONF port (string or int, defaults to "8181").
    :return: True once the response contains "clustering-it-provider",
        False after all attempts (30 tries, 10 s apart) are exhausted.
    """
    url = (
        "http://"
        + ip
        + ":"
        + str(port)
        + "/restconf/config/opendaylight-inventory:nodes/node/controller-config/yang-ext:mount/config:modules"
    )
    print("Waiting for controller " + ip + " up.")
    # Try 30*10s=5 minutes for the controller to be up.
    # NOTE: this used Python-2-only xrange(), which raises NameError on
    # Python 3; range() is equivalent here.
    for i in range(30):
        try:
            print("attempt %s to url %s" % (str(i), url))
            resp = get(url, "admin", "admin")
            print("attempt %s response is %s" % (str(i), str(resp)))
            print(resp.text)
            if "clustering-it-provider" in resp.text:
                print("Wait for controller " + ip + " succeeded")
                return True
        except Exception as e:
            # Connection refused etc. are expected while the controller boots.
            print(e)
        time.sleep(10)
    print("Wait for controller " + ip + " failed")
    return False
def startAllControllers(username, password, karafhome, port, *ips):
    """Start karaf on every controller, then wait for each to come up.

    Returns True when every controller answers; False as soon as one fails
    to come up (remaining controllers are not waited on).
    """
    start_cmd = karafhome + "/bin/start"
    for ip in ips:
        execute_ssh_command(ip, username, password, start_cmd)
    # all() short-circuits exactly like the original early return on failure.
    return all(wait_for_controller_up(ip, port) for ip in ips)
def startcontroller(ip, username, password, karafhome, port):
    """Start karaf on *ip* and wait until the controller answers RESTCONF."""
    execute_ssh_command(ip, username, password, "%s/bin/start" % karafhome)
    return wait_for_controller_up(ip, port)
def stopcontroller(ip, username, password, karafhome):
    """Stop the controller on *ip* and block until its process has exited."""
    executeStopController(ip, username, password, karafhome)
    wait_for_controller_stopped(ip, username, password, karafhome)
def executeStopController(ip, username, password, karafhome):
    """Invoke karaf's stop script on *ip* without waiting for shutdown."""
    execute_ssh_command(ip, username, password, "%s/bin/stop" % karafhome)
def stopAllControllers(username, password, karafhome, *ips):
    """Issue a stop to every controller first, then wait for each to exit.

    Stopping all before waiting lets the shutdowns proceed in parallel.
    """
    for ip in ips:
        executeStopController(ip, username, password, karafhome)
    for ip in ips:
        wait_for_controller_stopped(ip, username, password, karafhome)
def wait_for_controller_stopped(ip, username, password, karafHome):
    """Poll *ip* over SSH until no karaf process remains, else kill it.

    Checks the process count every 3 seconds, up to 20 tries (~1 minute of
    graceful shutdown). If karaf is still running after that, forcibly kills
    the controller.
    """
    ssh = SSHLibrary()
    ssh.open_connection(ip)
    ssh.login(username=username, password=password)
    # Wait 1 minute for the controller to stop gracefully
    max_tries = 20
    attempt = 1
    while attempt <= max_tries:
        stdout = ssh.execute_command("ps -axf | grep karaf | grep -v grep | wc -l")
        count = stdout[0].strip("\n")
        print("processCnt: " + count)
        if count == "0":
            break
        attempt += 1
        time.sleep(3)
    ssh.close_connection()
    # Only exhausting every try (no break) means karaf never went away.
    if attempt > max_tries:
        print("Killing controller")
        kill_controller(ip, username, password, karafHome)
def clean_journal(ip, username, password, karafHome):
    """Delete the karaf *journal directories under *karafHome* on *ip*."""
    execute_ssh_command(ip, username, password, "rm -rf %s/*journal" % karafHome)
def kill_controller(ip, username, password, karafHome):
    """Force-terminate (SIGKILL) every karaf process running on *ip*."""
    kill_cmd = (
        "ps axf | grep karaf | grep -v grep | awk '{print \"kill -9 \" $1}' | sh"
    )
    execute_ssh_command(ip, username, password, kill_cmd)
def isolate_controller(controllers, username, password, isolated):
    """Isolate one controller from the others in the cluster.

    :param controllers: A list of ip addresses or host names as strings.
    :param username: Username for the controller to be isolated.
    :param password: Password for the controller to be isolated.
    :param isolated: Number (starting at one) of the controller to be isolated.
    :return: If successful, returns "pass", otherwise returns the last failed IPTables text.
    """
    target = controllers[isolated - 1]
    # Insert a DROP rule in both directions between the target and each peer.
    for peer in controllers:
        if peer == target:
            continue
        for src, dst in ((target, peer), (peer, target)):
            rule = (
                "sudo iptables -I OUTPUT -p all --source "
                + src
                + " --destination "
                + dst
                + " -j DROP"
            )
            execute_ssh_command(target, username, password, rule)
    ip_tables = execute_ssh_command(target, username, password, "sudo iptables -L")
    print(ip_tables)
    # Verify each expected source/destination pair appears in the listing;
    # any missing rule makes the iptables text the (failing) result.
    iso_result = "pass"
    for peer in controllers:
        for src, dst in ((target, peer), (peer, target)):
            pattern = r"[\s\S]*" + src + " *" + dst + r"[\s\S]*"
            if not re.match(pattern, ip_tables):
                iso_result = ip_tables
    return iso_result
def rejoin_controller(controllers, username, password, isolated):
    """Return an isolated controller to the cluster.

    :param controllers: A list of ip addresses or host names as strings.
    :param username: Username for the isolated controller.
    :param password: Password for the isolated controller.
    :param isolated: Number (starting at one) of the isolated controller isolated.
    :return: If successful, returns "pass", otherwise returns the last failed IPTables text.
    """
    target = controllers[isolated - 1]
    # Delete the DROP rules, in both directions, between target and each peer.
    for peer in controllers:
        if peer == target:
            continue
        for src, dst in ((target, peer), (peer, target)):
            rule = (
                "sudo iptables -D OUTPUT -p all --source "
                + src
                + " --destination "
                + dst
                + " -j DROP"
            )
            execute_ssh_command(target, username, password, rule)
    ip_tables = execute_ssh_command(target, username, password, "sudo iptables -L")
    print(ip_tables)
    # The rules should now be gone: any pair still present in the listing
    # means the rejoin failed, so return the iptables text instead of "pass".
    iso_result = "pass"
    for peer in controllers:
        for src, dst in ((target, peer), (peer, target)):
            pattern = r"[\s\S]*" + src + " *" + dst + r"[\s\S]*"
            if re.match(pattern, ip_tables):
                iso_result = ip_tables
    return iso_result
def flush_iptables(controllers, username, password):
    """Removes all entries from IPTables on all controllers.

    :param controllers: A list of ip address or host names as strings.
    :param username: Username for all controllers.
    :param password: Password for all controllers.
    :return: "pass" when every flush succeeded, otherwise a failure message.
    """
    # Exact verbose output iptables emits when flushing the default chains.
    expected = "\n".join(
        [
            "Flushing chain `INPUT'",
            "Flushing chain `FORWARD'",
            "Flushing chain `OUTPUT'",
        ]
    )
    flush_result = "pass"
    for controller in controllers:
        print("Flushing ", controller)
        cmd_result = execute_ssh_command(
            controller, username, password, "sudo iptables -v -F"
        )
        print(cmd_result)
        if cmd_result != expected:
            flush_result = "Failed to flush IPTables. Check Log."
        for _ in range(3):
            print(".")
    return flush_result
def build_elastic_search_JSON_request(query_String):
    """Build an Elasticsearch request body for *query_String*.

    Returns a JSON string asking for the single newest document (sorted by
    TimeStamp descending) matching the given query_string query.
    """
    body = {}
    body["from"] = "0"
    body["size"] = "1"
    body["sort"] = [{"TimeStamp": {"order": "desc"}}]
    body["query"] = {"query_string": {"query": query_String}}
    return json.dumps(body)
def create_query_string_search(data_category, metric_name, node_id, rk_node_id):
    """Build a TSDR Lucene query string for a single metric of a node.

    Filters on data category, metric name, node id and record-key node id,
    fixed to the Node/Table record keys (table 0).
    """
    return (
        "TSDRDataCategory:"
        + data_category
        + " AND MetricName:"
        + metric_name
        + ' AND NodeID:"'
        + node_id
        + '" AND RecordKeys.KeyValue:"'
        + rk_node_id
        + '" AND RecordKeys.KeyName:Node AND RecordKeys.KeyValue:0 AND RecordKeys.KeyName:Table'
    )
def create_query_string_count(data_category):
    """Build a TSDR Lucene query string matching a whole data category."""
    return "TSDRDataCategory:" + data_category
def extract_metric_value_search(response):
    """Return the MetricValue of the first search hit as a string."""
    top_hit = response["hits"]["hits"][0]
    return str(top_hit["_source"]["MetricValue"])
def extract_metric_value_count(response):
    """Return the total hit count from a search response as an int."""
    return int(response["hits"]["total"])
#
# main invoked
# Runs at import time (e.g. when Robot Framework loads this library):
# registers one persistent requests session per HTTP verb so the REST
# helpers above can reuse connections via _cache.switch(alias).
if __name__ != "__main__":
    _cache = robot.utils.ConnectionCache("No sessions created")
    # here create one session for each HTTP functions
    _cache.register(requests.session(), alias="CLUSTERING_GET")
    _cache.register(requests.session(), alias="CLUSTERING_POST")
    _cache.register(requests.session(), alias="CLUSTERING_DELETE")