Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add lighthouse web core vitals benchmark score report #2

Merged
merged 1 commit into from
May 25, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions config/websites.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,16 @@
{
"name": "google",
"url": "https://www.google.com",
"screenshot": "../export/screenshots/google/20230525221903/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 22:19:04.png"
"screenshot": "../export/screenshots/google/20230525232204/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 23:22:05.png"
},
{
"name": "retasin",
"url": "https://www.retasin.com",
"screenshot": "../export/screenshots/retasin/20230525221913/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 22:19:14.png"
"screenshot": "../export/screenshots/retasin/20230525232236/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 23:22:38.png"
},
{
"name": "kodekeras",
"url": "https://kodekeras.my.id",
"screenshot": "../export/screenshots/kodekeras/20230525221926/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 22:19:27.png"
"screenshot": "../export/screenshots/kodekeras/20230525232319/screenshot_mockup_18_desktop_1600\u00d7900_1600x900_2023-05-25 23:23:20.png"
}
]
174 changes: 70 additions & 104 deletions ssind/lighthouse.py
Original file line number Diff line number Diff line change
@@ -1,112 +1,78 @@
import requests
import pandas as pd
from datetime import date

def _audit_value(lighthouse_data, audit_key):
    """Return the displayValue of one Lighthouse audit, or 0 if absent."""
    try:
        return lighthouse_data['audits'][audit_key]['displayValue']
    except KeyError:
        print('No Values')
        return 0


def webcorevitals(url_list, device, category, today):
    """Run the PageSpeed Insights API for each URL and save a CSV report.

    Parameters
    ----------
    url_list : pandas.DataFrame
        Must contain a 'URL' column with the pages to audit.
    device : str
        PageSpeed strategy: 'mobile' or 'desktop'.
    category : str
        Lighthouse category to run (e.g. 'performance').
    today : str
        Date string stored in each row and used in the CSV filename.

    Side effects: writes '<today>_all_scores.csv' to the working directory.
    """
    df_list = []
    for url in url_list['URL']:
        print(url)

        # Making API call for URL
        response = requests.get(
            "https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url="
            + url + "&strategy=" + device + "&category=" + category)

        # Saving response as JSON
        data = response.json()

        print('Running URL #', url)

        # Getting Metrics
        try:
            data = data['lighthouseResult']
        except KeyError:
            print('No Values')
            # Fix: an empty dict lets the audit lookups below take their
            # KeyError fallback; the old string sentinel raised TypeError.
            data = {}

        fcp = _audit_value(data, 'first-contentful-paint')   # First Contentful Paint
        lcp = _audit_value(data, 'largest-contentful-paint') # Largest Contentful Paint
        cls_value = _audit_value(data, 'cumulative-layout-shift')  # Cumulative Layout Shift
        si = _audit_value(data, 'speed-index')               # Speed Index
        tti = _audit_value(data, 'interactive')              # Time to Interactive
        tbt = _audit_value(data, 'total-blocking-time')      # Total Blocking Time

        # Overall performance score (0..1)
        try:
            score = data['categories']['performance']['score']
        except KeyError:
            print('No Values')
            # Fix: previously left undefined here, causing a NameError
            # when building the row below.
            score = 0

        # One row per URL
        values = [url, score, fcp, si, lcp, tti, tbt, cls_value, today]
        df_score = pd.DataFrame(
            [values],
            columns=['URL', 'Score', 'FCP', 'SI', 'LCP', 'TTI', 'TBT', 'CLS', 'Date'])
        df_list.append(df_score)

    # Guard: pd.concat([]) raises ValueError on an empty URL list.
    if not df_list:
        print('No URLs to process.')
        return

    # Concatenating list of dataframes into one
    df = pd.concat(df_list)

    # Removing unit suffixes ('s', 'ms') and thousands separators so the
    # metrics become floats (e.g. for computing means later).
    for col in ('LCP', 'FCP', 'SI', 'TTI'):
        df[col] = df[col].astype(str).str.replace('s', '', regex=False).astype(float)
    df['TBT'] = (df['TBT'].astype(str)
                 .str.replace('ms', '', regex=False)
                 .str.replace(',', '', regex=False)
                 .astype(float))
    df['Score'] = df['Score'].astype(float)
    df['CLS'] = df['CLS'].astype(float)

    # Save DataFrame as CSV
    filename = today + '_all_scores.csv'
    df.to_csv(filename)
    print('File was saved in', filename)


# Load URL list from Excel file; expects a column named 'URL'
# (webcorevitals iterates url_list['URL']).
url_list = pd.read_excel('urls.xlsx')

# Set device (mobile or desktop), category, and today's date
device = 'mobile'
category = 'performance'
today = date.today().strftime("%Y-%m-%d")  # e.g. '2023-05-25'; used in rows and CSV filename

# Call webcorevitals function (runs the API per URL and writes the CSV report)
webcorevitals(url_list, device, category, today)
def webcorevitals(url, device='mobile', category='performance', today=None):
    """Fetch Lighthouse Core Web Vitals for one URL via the PageSpeed
    Insights API and return them as a dict.

    Parameters
    ----------
    url : str
        The page URL to audit.
    device : str, optional
        PageSpeed strategy: 'mobile' (default) or 'desktop'.
        Fix: previously read from a module global that no longer exists.
    category : str, optional
        Lighthouse category to run; defaults to 'performance'.
    today : str, optional
        Date string stored under 'Date'; defaults to today's date.

    Returns
    -------
    dict
        Keys: URL, Date, FCP, LCP, CLS, SI, TTI, TBT, Score.
        Missing metrics fall back to 0.
    """
    if today is None:
        today = date.today().strftime("%Y-%m-%d")

    result = {'URL': url, 'Date': today}

    # Making API call for URL
    response = requests.get(
        "https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url="
        + url + "&strategy=" + device + "&category=" + category)

    # Saving response as JSON
    data = response.json()

    print('Running URL #', url)

    # Getting Metrics
    try:
        data = data['lighthouseResult']
    except KeyError:
        print('No Values')
        # Fix: an empty dict lets each lookup below take its KeyError
        # fallback; the old string sentinel raised TypeError instead.
        data = {}

    # Map result keys to Lighthouse audit ids; each uses displayValue.
    metrics = {
        'FCP': 'first-contentful-paint',    # First Contentful Paint
        'LCP': 'largest-contentful-paint',  # Largest Contentful Paint
        'CLS': 'cumulative-layout-shift',   # Cumulative Layout Shift
        'SI': 'speed-index',                # Speed Index
        'TTI': 'interactive',               # Time to Interactive
        'TBT': 'total-blocking-time',       # Total Blocking Time
    }
    for label, audit_key in metrics.items():
        try:
            result[label] = data['audits'][audit_key]['displayValue']
        except KeyError:
            print('No Values')
            result[label] = 0

    # Overall performance score (0..1)
    try:
        result['Score'] = data['categories']['performance']['score']
    except KeyError:
        print('No Values')
        # Fix: the key was previously left out entirely on failure,
        # breaking callers that read result['Score'].
        result['Score'] = 0

    return result
32 changes: 19 additions & 13 deletions ssind/ssind.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
from selenium.webdriver.chrome.options import Options
from tqdm import tqdm
from PIL import Image, ImageDraw
import lighthouse


# yay -S wkhtmltopdf python-pip

Expand Down Expand Up @@ -94,10 +96,12 @@ def capture_screenshots(clear, config, report):

# Log the website status and loading time in the terminal and log file
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
log_entry = f"{timestamp} - {name} ({url}): {status_code}, Loading Time: {loading_time}"
Score = lighthouse.webcorevitals(url)
log_entry = f"{timestamp} - {name} ({url}): {status_code}, Loading Time: {loading_time}, Score: {Score}"
# print(log_entry)
log_file.write(log_entry + '\n')



driver.get(url)
driver.implicitly_wait(10) # Wait for the page to load completely
Expand Down Expand Up @@ -143,18 +147,22 @@ def capture_screenshots(clear, config, report):
mockup_folder = os.path.join('mockups', mockup_path)
add_mockup_to_screenshot(screenshot_path, mockup_folder, screenshot_mockup_path)

# Resize
# Save the screenshot paths to a list
screenshot_paths.append(screenshot_mockup_path)

# Open the screenshot image
screenshot = Image.open(screenshot_path)

# Resize the captured screenshots
for screenshot_path in screenshot_paths:

# Resize the image to the effective resolution
resized_screenshot = screenshot.resize((int(effective_width), int(effective_height)))

# Resize the image to the effective resolution
resized_screenshot = screenshot.resize((int(effective_width), int(effective_height)))

# Save the resized image
resized_screenshot.save(f"../export/resized_screenshots/{platform}_{name}_resized.png")
# Save the resized image
resized_screenshot.save(f"../export/resized_screenshots/{index}_{platform}_{name}_resized.png")

# Save the screenshot paths to a list
screenshot_paths.append(screenshot_mockup_path)


# screenshot = screenshot_paths[-1] if screenshot_paths else "" # Update the 'screenshot' field with the last screenshot path or empty string

Expand Down Expand Up @@ -236,8 +244,8 @@ def clear_screenshots_folder():

def clear_log_file():
    """No-op placeholder: log-file clearing is currently disabled.

    The original truncation is kept commented out below so it can be
    re-enabled by uncommenting a single line.
    """
    # open('../export/website_status.log', 'w').close()
    # Fix: message was garbled ("Log file deative to clear.").
    print("Log file clearing is deactivated.")

def generate_pdf_report(base_directory):

Expand Down Expand Up @@ -342,8 +350,6 @@ def calculate_pixel_density(screen_width, screen_height, physical_width, physica

return pixel_density



def main():
click.echo(BANNER)
capture_screenshots()
Expand Down