### Roan Linde (roan.linde@caafrica.co.za) ### Pre-Sales APIM and APM ### CA Southern Africa import json import requests import random import os import logging # from tinydb import TinyDB, Query # --------------------------------------------------- import sqlite3 import datetime # ---------------------------------------------------- # import smtplib # from email.MIMEMultipart import MIMEMultipart # from email.MIMEBase import MIMEBase # from email import Encoders # from email.mime.text import MIMEText # ------------------------------------------------------- # setup of environmental variables and logging dir_path = os.path.dirname(os.path.realpath(__file__)) requests.packages.urllib3.disable_warnings() logging.basicConfig(filename='app.log', level=logging.INFO, filemode="w", format='[%(asctime)s] %(levelname)s - %(message)s', datefmt='%H:%M:%S') # db = TinyDB('db.json') logging.info('Start of log') # SET_ENV = "PRD" # defined below where function is called SET_ENV="ENV" env = { "ENV": { "OAUTH": "https://:9443/auth/oauth/v2/token", "METRIC_BASE": "https://:9443//analytics/metrics/v1/", "PAPI_BASE": "https://:9443//", "CLIENT": "PAPI_CLIENT_ID", "SECRET": "PAPI_CLIENT_SECRET" } } } def get_token(): token_endpoint = env[SET_ENV]["OAUTH"] client = env[SET_ENV]["CLIENT"] secret = env[SET_ENV]["SECRET"] response = requests.post(token_endpoint, data={'grant_type': 'client_credentials'}, verify=False, allow_redirects=False, auth=(client, secret) ) response_to_json = json.loads(response.text) access_token = response_to_json["access_token"] return access_token def get_metrics(token, metric, time_query, specify_org=None, specify_app=None, specify_api=None, printout=None): # error metric example # # hit rate metric example # # latency metric example # # Custom time frame # hits/apis?timerange=custom&startDate=2019-01-31&endDate=2019-02-28&sortorder=desc&sortby=hits # These two are mandatory, so we can add them without checking metric = metric + "/apis?" time_range = "timerange=%s&" %(time_query) # the following are optional, so need to check if they are supplied if specify_app is not None: app_id = "appIds=%s&" %(specify_app) else: app_id = "" if specify_api is not None: api_id = "apiIds=%s&" %(specify_api) else: api_id = "" # in the event that the Organisation is supplied, it needs to be added to the header if specify_org is None: headers = {'Authorization': "bearer " + token} else: headers = {"Authorization": "bearer " + token, "APIM-OrgUuid": specify_org} # construct the end point based of the above info endpoint = env[SET_ENV]["METRIC_BASE"] + \ metric + time_range + app_id + api_id # remove the "&" is there is one at the end if endpoint.endswith("&"): # add the sort option per metric if "hits" in metric: endpoint = endpoint + "sortorder=desc&sortby=hits" if "latency" in metric: endpoint = endpoint + "sortorder=desc&sortby=avg" if "errors" in metric: endpoint = endpoint + "sortorder=desc&sortby=errors" # endpoint = endpoint[:-1] # print(endpoint) # log request to file if requested if printout is not None: msg = "METRICS FUNCTION: REQUEST: ", str(endpoint) logging.info(msg) # actual call to Portal request = requests.get(endpoint, headers=headers, verify=False) response = json.loads(request.text) # log response to file if requested if printout is not None: msg = "METRICS FUNCTION: RESPONSE: ", str(response) logging.info(msg) return response def papi(token, metric, uuid=None): if metric == "organizations": # Returns the number of organisations endpoint = env[SET_ENV]["PAPI_BASE"] + "tenant-admin/1.0/organizations" if metric == "account_plans": endpoint = env[SET_ENV]["PAPI_BASE"] + "AccountPlans" if metric == "applications": # Returns a list of all applications, including org-uuid and friendly names endpoint = env[SET_ENV]["PAPI_BASE"] + "api-management/1.0/applications?size=1000" if metric == "app": # Returns a single applications with associated apis endpoint = env[SET_ENV]["PAPI_BASE"] + "Applications('%s')" % (uuid) if metric == "users": endpoint = env[SET_ENV]["PAPI_BASE"] + "Users" if metric == "api": endpoint = env[SET_ENV]["PAPI_BASE"] + "api-management/1.0/apis/%s" % (uuid) if metric == "api_group": endpoint = env[SET_ENV]["PAPI_BASE"] + "api-management/1.0/api-groups/%s/apis" % (uuid) # print(endpoint) headers = {'Authorization': "bearer " + token} request = requests.get(endpoint, headers=headers, verify=False) response = json.loads(request.text) return response def get_orgs_and_apps(applications): organisations = {} def rand_color(): return random.randint(0, 255) for i in range(len(applications["results"])): org_name = applications["results"][i]["organizationName"] org_uuid = applications["results"][i]["organizationUuid"] app_uuid = applications["results"][i]["uuid"] app_name = applications["results"][i]["name"] if org_name in organisations: organisations[org_name]["app_count"] += 1 organisations[org_name]["api_hits"] = {} organisations[org_name]["api_latency"] = {} organisations[org_name]["api_errors"] = {} # organisations[org_name]["apps"][app_name] = app_uuid # organisations[org_name]["apps"][app_name].update( organisations[org_name]["apps"][app_name] = {"app_guid": app_uuid} organisations[org_name]["apps"][app_name]["metrics"] = {} else: organisations[org_name] = {} organisations[org_name]["org_uuid"] = org_uuid organisations[org_name]["app_count"] = 1 organisations[org_name]["api_hits"] = {} organisations[org_name]["api_latency"] = {} organisations[org_name]["api_errors"] = {} organisations[org_name]["color"] = ("#%02X%02X%02x" % ( rand_color(), rand_color(), rand_color())) organisations[org_name]["apps"] = {} organisations[org_name]["apps"][app_name] = {"app_guid": app_uuid} organisations[org_name]["apps"][app_name]["metrics"] = {} # organisations[org_name]["apps"][app_name]["metrics"][] = {} # organisations[org_name]["apps"][app_name] = app_uuid # print(json.dumps(organisations)) for org, org_info in organisations.items(): # print(org) # for each org, query their apps in order to get app guids for app_name_, app_info in org_info["apps"].items(): # now lookup each app guid to get apis # print(app_name) try: all_apis = papi(token, "app", app_info["app_guid"])["ApiIds"]["results"] # apis_per_app = {} # apis_per_app[] = {} # some apis have api groups, so # 1. get the groups # 2. lookup the group # 3. find all apis in the group # 4. add apis to array api_groups = papi(token, "app", app_info["app_guid"])["ApiGroupIds"]["results"] if len(api_groups) > 0: for api_group in api_groups: list_of_apis_in_group = papi(token, "api_group", api_group) for each_api in list_of_apis_in_group: for uuid_k, uuid_v in each_api.items(): all_apis.append(uuid_v) except Exception as e: print("Issue while getting API GUID's:", e) continue # for date in ["day", "week", "month", "year"]: # try: # place_holder_dict = {} # for api in all_apis: # api_name = papi(token, "api", api)["name"] # organisations[org]["apps"][app_name_]["metrics"][api_name] = {} # logging.info(api_name) # api_hits = get_metrics(token, "hits", time_query=date, # specify_org=org_info["org_uuid"], specify_app=app_info["app_guid"], specify_api=api, printout=True)["data"]["hits"] # logging.info(api_hits) # organisations[org]["apps"][app_name_]["metrics"][api_name].update({date: api_hits}) for date in ["day", "week", "month", "year"]: # get hit rates per tenant org_level_hits = get_metrics(token, "hits", time_query=date, specify_org=org_info["org_uuid"], printout=True)["data"]["hits"] # get latency per tenant try: org_level_latency = get_metrics(token, "latency", time_query=date, specify_org=org_info["org_uuid"], printout=True)["data"]["avg"] except Exception as e: org_level_latency = 0 try: org_level_errors = get_metrics(token, "errors", time_query=date, specify_org=org_info["org_uuid"], printout=True)["data"]["errors"] except Exception as e: org_level_errors = 0 # set hit rates organisations[org]["api_hits"].update({date: org_level_hits}) # set latency organisations[org]["api_latency"].update({date: org_level_latency}) # set errors organisations[org]["api_errors"].update({date: org_level_errors}) for api in all_apis: try: api_name = papi(token, "api", api)["name"] organisations[org]["apps"][app_name_]["metrics"][api_name] = {} for date in ["day", "week", "month", "year"]: logging.info(api_name) api_hits = get_metrics(token, "hits", time_query=date, specify_org=org_info["org_uuid"], specify_app=app_info["app_guid"], specify_api=api, printout=True)["data"]["hits"] logging.info(api_hits) organisations[org]["apps"][app_name_]["metrics"][api_name].update({date: api_hits}) # print(json.dumps( # organisations[org_name]["apps"][app_name])) # print("updated", app_name, app_name_) # "RANGE": date, "API NAME": api_name, "API HITS": api_hits} # print(org, app_name, api_name, date, api_hits) # db.insert({"ORG": org, "APP": app_name, "TIMEFRAME": date, "HITRATE": api_hits}) # if n in apis_per_app[k]: # if date_query_2 in apis_per_app[k][n]: # apis_per_app[k][n][date_query_2][api_spec] = api_hits # # print("step 02:", apis_per_app) # else: # apis_per_app[k][n][date_query_2] = {} # # print("step 03:", apis_per_app) # print(date_query_2) # else: # apis_per_app[k][n] = {} except Exception as e: print("Issue in getting metrics:", e) continue # print(json.dumps(apis_per_app)) # for each_api in api_groups: # all_apis.append(each_api) # print(all_apis) return organisations # metrics = {} datetime = str(datetime.date.today()) SET_ENVS = ["PPE"] for SET_ENV in SET_ENVS: print(SET_ENV) token = get_token() applications = papi(token, "applications") orgs_and_apps = get_orgs_and_apps(applications) # print(json.dumps(orgs_and_apps)) output_file = os.path.join(dir_path, (datetime + "_" + SET_ENV + "_context_output.json")) with open(output_file, "w") as out: out.write(json.dumps(orgs_and_apps)) # -------------------------------------------------------------------------------------------------------- # datetime = datetime.replace(" ", "_") # datetime = datetime.replace(":", "-") dir_path = os.path.dirname(os.path.realpath(__file__)) # papi_json = os.path.join(dir_path, "context_output.json") db_path = os.path.join(dir_path, (datetime + "_" + SET_ENV + "_sqlite.db")) print(db_path) db_conn = sqlite3.connect(db_path) db_curs = db_conn.cursor() db_curs.execute(""" CREATE TABLE IF NOT EXISTS HIT_RATES( id INTEGER PRIMARY KEY, timestamp TEXT, metric TEXT, tenant TEXT, day TEXT, week TEXT, month TEXT, year TEXT ) """) db_conn.commit() with open(output_file, "r") as f: papi = json.load(f) for tenants, tenant_info in papi.items(): hit_rates = tenant_info["api_hits"] error_rates = tenant_info["api_errors"] latency_rates = tenant_info["api_latency"] # print(tenants, hit_rates["week"], hit_rates["month"]) insert_hits = "INSERT INTO HIT_RATES(timestamp, metric, tenant, day, week, month, year) VALUES('%s', '%s', '%s', '%s', '%s', '%s', '%s');" % ( datetime, "hits", tenants, hit_rates["day"], hit_rates["week"], hit_rates["month"], hit_rates["year"]) insert_latency = "INSERT INTO HIT_RATES(timestamp, metric, tenant, day, week, month, year) VALUES('%s', '%s', '%s', '%s', '%s', '%s', '%s');" % ( datetime, "latency", tenants, latency_rates["day"], latency_rates["week"], latency_rates["month"], latency_rates["year"]) insert_errors = "INSERT INTO HIT_RATES(timestamp, metric, tenant, day, week, month, year) VALUES('%s', '%s', '%s', '%s', '%s', '%s', '%s');" % ( datetime, "errors", tenants, error_rates["day"], error_rates["week"], error_rates["month"], error_rates["year"]) db_curs.execute(insert_hits) db_curs.execute(insert_latency) db_curs.execute(insert_errors) db_conn.commit() csv_command = 'sqlite3 -header -csv %s "select * from %s" > %s' % ( db_path, "HIT_RATES", os.path.join(dir_path, (datetime + "_" + SET_ENV + "_metrics.csv"))) print(csv_command) os.system(csv_command) # -------------------------------------------------------------------------------------- # SUBJECT = "%s APIM Billing" %(SET_ENV) # csv = os.path.join(dir_path, (datetime + "_" + SET_ENV + "_metrics.csv")) # recipients = [''] # msg = MIMEMultipart() # msg['Subject'] = SUBJECT # msg['From'] = "APIM Billing" # msg['To'] = ", ".join(recipients) # text = "Please see attached usage report for %s APIM" %(SET_ENV) # msg.attach(MIMEText(text)) # part = MIMEBase('application', "octet-stream") # part.set_payload(open(csv, "rb").read()) # # part.set_payload(open("2019-03-20_metrics.csv", "rb").read()) # Encoders.encode_base64(part) # # head = ('Content-Disposition', 'attachment; filename="%s"' % # # (datetime + "_metrics.csv")) # attach = 'attachment; filename="%s"' %(csv) # # part.add_header('Content-Disposition', # # 'attachment; filename="2019-03-20_metrics.csv"') # part.add_header('Content-Disposition', attach) # msg.attach(part) # # ----- FOR TESTING ----- # server = smtplib.SMTP("smtp.gmail.com", 587) # server.ehlo() # server.starttls() # server.login("", "") # server.sendmail("r", recipients, msg.as_string())