Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

dump_current option #185

Merged
merged 6 commits into from
Aug 12, 2023
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added dist/crhc
Binary file not shown.
2 changes: 2 additions & 0 deletions docs/usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,8 @@ The main idea of this script is to collect the information from console.redhat.c
- This option will show some information regarding to the user who requested the token
* - crhc ts dump
- Export the whole Inventory and Subscription information in json format. Some files will be created.
* - crhc ts dump_current
- Export the current Inventory and Subscription information in json format. Some files will be created.
* - crhc ts match
- If the files mentioned above are not around, this feature will call the dump and after that will check both files and will create the 3rd one with the whole information correlated accordingly.
* - crhc ts clean
Expand Down
142 changes: 95 additions & 47 deletions execution/execution.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def inventory_list():
return inventory_full_detail


def inventory_list_all():
def inventory_list_all(current_only=False):
"""
This def will collect all the HBI entries
"""
Expand Down Expand Up @@ -157,7 +157,8 @@ def inventory_list_all():

# For debugging purposes
# num_of_pages = 2




for page in range(1, num_of_pages):
url = (
Expand All @@ -169,50 +170,59 @@ def inventory_list_all():
)
response = connection_request(url)

inventory_batch = []
is_first_server = True
server_detail_url = "https://console.redhat.com/api/inventory/v1/hosts/"
inventory_batch = []
is_first_server = True
server_detail_url = "https://console.redhat.com/api/inventory/v1/hosts/"
for server in response.json()["results"]:
server_id = server["id"]
inventory_batch.append(server_id)
# if its the first entry
if (len(inventory_batch) == 1):
server_detail_url = server_detail_url + server_id
else:
server_detail_url = server_detail_url + "," + server_id

# now call the server details request with up to 50 ids
url = (
server_detail_url
+ "/system_profile"
+ FIELDS_TO_RETRIEVE
)
response_system_profile = connection_request(url)


# now loop through the original server request
for server in response.json()["results"]:

try:
stage_dic["server"] = server
except json.decoder.JSONDecodeError:
stage_dic["server"] = {}

server_id = server["id"]

try:
server_details_list = response_system_profile.json()["results"]
# loop through all the server details - finding the one that matches the id we're looping through
for server_details in server_details_list:
if (server_details["id"] == server_id ):
stage_dic["system_profile"] = server_details["system_profile"]
except json.decoder.JSONDecodeError:
stage_dic["system_profile"] = {}
except KeyError:
stage_dic["system_profile"] = {}

list_of_servers.append(stage_dic)
stage_dic = {}
stale_timestamp = server["stale_timestamp"]
# include this host if we want all systems, or if we only want current systems and this one is current
if (not current_only or (current_only and is_fresh(stale_timestamp))):
inventory_batch.append(server_id)
# if it's the first entry
if (len(inventory_batch) == 1):
server_detail_url = server_detail_url + server_id
else:
server_detail_url = server_detail_url + "," + server_id

# now call the server details request with up to 50 ids, assuming that we have some server ids in this batch
if (len(inventory_batch) >0):
url = (
server_detail_url
+ "/system_profile"
+ FIELDS_TO_RETRIEVE
)
response_system_profile = connection_request(url)


# now loop through the original server request
for server in response.json()["results"]:
# check whether we're getting everything - or whether the system is current or not
stale_timestamp = server["stale_timestamp"]
if (not current_only or (current_only and is_fresh(stale_timestamp))):
try:
stage_dic["server"] = server
except json.decoder.JSONDecodeError:
stage_dic["server"] = {}

server_id = server["id"]

try:
server_details_list = response_system_profile.json()["results"]
# loop through all the server details - finding the one that matches the id we're looping through
for server_details in server_details_list:
if (server_details["id"] == server_id ):
stage_dic["system_profile"] = server_details["system_profile"]
except json.decoder.JSONDecodeError:
stage_dic["system_profile"] = {}
except KeyError:
stage_dic["system_profile"] = {}

list_of_servers.append(stage_dic)
stage_dic = {}

return inventory_full_detail

Expand Down Expand Up @@ -407,7 +417,7 @@ def swatch_list():
return response.json()


def swatch_list_all():
def swatch_list_all(current_only=False):
"""
This def will collect all the entries from Subscription Watch
"""
Expand All @@ -427,10 +437,7 @@ def swatch_list_all():
)
# num_of_pages = round(response.json()['meta']['count'] / 100 + 1)

dic_full_list = {
"data": "",
"meta": {"count": response.json()["meta"]["count"]},
}

full_list = []
dup_kvm_servers = []
server_with_no_dupes = []
Expand All @@ -449,7 +456,15 @@ def swatch_list_all():
# count = count + 100

for entry in response.json()["data"]:
full_list.append(entry)
last_seen = entry.get("last_seen")
# either get all systems, or if getting current, check the last seen date
if (not current_only or (current_only and seen_recently(last_seen))):
full_list.append(entry)

dic_full_list = {
"data": "",
"meta": {"count": len(full_list)},
}

# The piece below is just to check/remove the duplicate entries
# caused by kvm/libvirt hypervisors. At this moment, swatch is
Expand Down Expand Up @@ -532,6 +547,39 @@ def swatch_list_all():

return dic_full_list

def is_fresh(stale_timestamp):
    """
    Return True when a host's stale timestamp has not yet passed.

    Parameters:
        stale_timestamp: ISO-8601 timestamp string as returned by the
            Inventory API (e.g. "2023-08-12T10:00:00.000+00:00"). Only the
            first 19 characters ("%Y-%m-%dT%H:%M:%S") are parsed; fractional
            seconds and any timezone offset are ignored.

    Returns:
        bool: False when the stale date is already in the past (the host is
        stale); True otherwise. The function is deliberately best-effort:
        missing or unparseable input yields True (treated as fresh) instead
        of raising, so a bad record never aborts a full inventory dump.

    NOTE(review): the comparison uses naive local time via datetime.now();
    if the API timestamps are UTC this can mis-classify hosts by the local
    UTC offset — confirm and consider timezone-aware handling.
    """
    # Guard first: the original code called len() on the raw value before
    # the try block, so a None/empty timestamp crashed with a TypeError.
    if not stale_timestamp:
        return True

    # Trim to 19 chars so strptime's fixed format matches regardless of
    # fractional seconds / offset suffix (slicing a shorter string is a no-op).
    date_string = stale_timestamp[:19]
    try:
        stale_date = datetime.datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
        # Fresh means the stale deadline is still in the future (or now).
        return stale_date >= datetime.datetime.now()
    except (ValueError, TypeError) as e:
        # Best-effort: report the parse failure but keep the host.
        print("Exception in is_fresh : " + str(e))
        return True

def seen_recently(last_seen):
    """
    Return True when a host's last check-in was within the past day.

    Parameters:
        last_seen: ISO-8601 timestamp string from Subscription Watch
            (e.g. "2023-08-12T10:00:00.000Z"). Only the first 19 characters
            ("%Y-%m-%dT%H:%M:%S") are parsed; fractional seconds and any
            timezone suffix are ignored. May be None — the caller obtains it
            via entry.get("last_seen").

    Returns:
        bool: False when last_seen plus one day is already in the past;
        True otherwise. Best-effort by design: missing or unparseable input
        yields True (treated as recent) instead of raising, matching the
        error-path behavior, so a bad record never aborts a dump.

    NOTE(review): comparison uses naive local time via datetime.now(); if
    the API timestamps are UTC this can shift the one-day window by the
    local UTC offset — confirm and consider timezone-aware handling.
    """
    # Guard first: the original code called len() on the raw value before
    # the try block, so a None last_seen (possible via dict.get) crashed
    # with a TypeError instead of being handled.
    if not last_seen:
        return True

    # Trim to 19 chars so strptime's fixed format matches regardless of
    # fractional seconds / timezone suffix.
    date_string = last_seen[:19]
    try:
        last_seen_date = datetime.datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
        # "Recent" means the one-day grace window has not yet elapsed.
        cutoff = last_seen_date + datetime.timedelta(days=1)
        return cutoff >= datetime.datetime.now()
    except (ValueError, TypeError) as e:
        # Best-effort: report the parse failure but keep the host.
        print("Exception in seen_recently : " + str(e))
        return True


def swatch_socket_summary():
"""
Expand Down
7 changes: 4 additions & 3 deletions help/help_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,9 +153,10 @@ def help_ts_menu():
crhc ts [command]\n\
\n\
Available Commands:\n\
dump dump the json files, Inventory and Subscription\n\
match match the Inventory and Subscription information\n\
clean cleanup the local 'cache/temporary/dump' files\
dump dump the json files, Inventory and Subscription\n\
dump_current dump the json files with current systems only, Inventory and Subscription\n\
match match the Inventory and Subscription information\n\
clean cleanup the local 'cache/temporary/dump' files\
"
print(content)
return content
Expand Down
17 changes: 15 additions & 2 deletions parse/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,8 +530,21 @@ def troubleshoot_sub_menu():

try:
if (sys.argv[1] == "ts") and (sys.argv[2] == "dump"):
ts.dump_inv_json()
ts.dump_sw_json()
ts.dump_inv_json(False)
ts.dump_sw_json(False)
ts.dump_patch_json()
ts.dump_vulnerability_json()
ts.dump_advisor_json()
ts.compress_json_files()
sys.exit()
except IndexError as e:
# print("Error1: {}".format(e))
...

try:
if (sys.argv[1] == "ts") and (sys.argv[2] == "dump_current"):
ts.dump_inv_json(True)
ts.dump_sw_json(True)
ts.dump_patch_json()
ts.dump_vulnerability_json()
ts.dump_advisor_json()
Expand Down
12 changes: 6 additions & 6 deletions troubleshoot/ts.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from conf import conf


def dump_inv_json():
def dump_inv_json(current_only):
"""
Function to dump only Inventory information
"""
Expand All @@ -25,13 +25,13 @@ def dump_inv_json():
conf.INV_JSON_FILE
)
)
inventory = execution.inventory_list_all()
inventory = execution.inventory_list_all(current_only)

file_obj = open(conf.INV_JSON_FILE, "w")
file_obj.write(json.dumps(inventory, indent=4))


def dump_sw_json():
def dump_sw_json(current_only):
"""
Function to dump only Swatch information
"""
Expand All @@ -41,7 +41,7 @@ def dump_sw_json():
conf.SW_JSON_FILE
)
)
swatch = execution.swatch_list_all()
swatch = execution.swatch_list_all(current_only)

file_obj = open(conf.SW_JSON_FILE, "w")
file_obj.write(json.dumps(swatch, indent=4))
Expand Down Expand Up @@ -135,7 +135,7 @@ def match_hbi_sw():
"File {} already in place, using it.".format(conf.INV_JSON_FILE)
)
except FileNotFoundError:
dump_inv_json()
dump_inv_json(False)
file_obj = open(conf.INV_JSON_FILE, "r")
inventory = json.load(file_obj)

Expand All @@ -144,7 +144,7 @@ def match_hbi_sw():
swatch = json.load(file_obj)
print("File {} already in place, using it.".format(conf.SW_JSON_FILE))
except FileNotFoundError:
dump_sw_json()
dump_sw_json(False)
file_obj = open(conf.SW_JSON_FILE, "r")
swatch = json.load(file_obj)

Expand Down