import tkinter as tk
from tkinter import ttk, messagebox, scrolledtext
import requests
from urllib.parse import urlparse
from bs4 import BeautifulSoup
import re
import threading
from datetime import datetime
import json
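# Note: requests and beautifulsoup4 are third-party packages, not part of the
# standard library. A typical setup (assuming pip is available) would be:
#   pip install requests beautifulsoup4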


class SEOAnalyzer:
    def __init__(self, root):
        self.root = root
        self.root.title("Website SEO Analyzer")
        self.root.geometry("900x700")
        self.root.configure(bg='#f0f0f0')

        # Set up the style
        self.setup_styles()

        # Create the interface
        self.create_widgets()

    def setup_styles(self):
        self.style = ttk.Style()
        self.style.configure('Title.TLabel', font=('Arial', 16, 'bold'), background='#f0f0f0')
        self.style.configure('Header.TLabel', font=('Arial', 12, 'bold'), background='#f0f0f0')
        self.style.configure('Normal.TLabel', font=('Arial', 10), background='#f0f0f0')
        self.style.configure('Good.TLabel', font=('Arial', 10), background='#f0f0f0', foreground='green')
        self.style.configure('Warning.TLabel', font=('Arial', 10), background='#f0f0f0', foreground='orange')
        self.style.configure('Error.TLabel', font=('Arial', 10), background='#f0f0f0', foreground='red')
        self.style.configure('Analyze.TButton', font=('Arial', 12, 'bold'))

    def create_widgets(self):
        # Main frame
        main_frame = ttk.Frame(self.root, padding="20")
        main_frame.pack(fill=tk.BOTH, expand=True)

        # Title
        title_label = ttk.Label(main_frame, text="Website SEO Analyzer", style='Title.TLabel')
        title_label.pack(pady=(0, 20))

        # URL input section
        url_frame = ttk.Frame(main_frame)
        url_frame.pack(fill=tk.X, pady=(0, 20))

        ttk.Label(url_frame, text="Enter Website URL:", style='Header.TLabel').pack(anchor=tk.W)

        url_input_frame = ttk.Frame(url_frame)
        url_input_frame.pack(fill=tk.X, pady=(5, 0))

        self.url_var = tk.StringVar(value="https://")
        url_entry = ttk.Entry(url_input_frame, textvariable=self.url_var, font=('Arial', 11), width=50)
        url_entry.pack(side=tk.LEFT, fill=tk.X, expand=True, padx=(0, 10))

        self.analyze_btn = ttk.Button(url_input_frame, text="Analyze SEO", command=self.start_analysis, style='Analyze.TButton')
        self.analyze_btn.pack(side=tk.RIGHT)

        # Progress bar
        self.progress = ttk.Progressbar(main_frame, mode='indeterminate')
        self.progress.pack(fill=tk.X, pady=(0, 20))

        # Results notebook (tabbed interface)
        self.notebook = ttk.Notebook(main_frame)
        self.notebook.pack(fill=tk.BOTH, expand=True)

        # Overview tab
        self.overview_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.overview_frame, text="Overview")

        # Content tab
        self.content_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.content_frame, text="Content Analysis")

        # Technical tab
        self.technical_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.technical_frame, text="Technical SEO")

        # Results tab
        self.results_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.results_frame, text="Detailed Results")

        # Initialize result displays
        self.setup_overview_tab()
        self.setup_content_tab()
        self.setup_technical_tab()
        self.setup_results_tab()

    def setup_overview_tab(self):
        # SEO Score
        score_frame = ttk.LabelFrame(self.overview_frame, text="SEO Score", padding="10")
        score_frame.pack(fill=tk.X, pady=(0, 10))

        self.score_label = ttk.Label(score_frame, text="Not analyzed yet", font=('Arial', 24, 'bold'))
        self.score_label.pack()

        # Key metrics frame
        metrics_frame = ttk.LabelFrame(self.overview_frame, text="Key Metrics", padding="10")
        metrics_frame.pack(fill=tk.BOTH, expand=True)

        # Create a canvas and scrollbar for metrics
        canvas = tk.Canvas(metrics_frame, bg='white')
        scrollbar = ttk.Scrollbar(metrics_frame, orient="vertical", command=canvas.yview)
        self.scrollable_metrics_frame = ttk.Frame(canvas)

        self.scrollable_metrics_frame.bind(
            "<Configure>",
            lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
        )

        canvas.create_window((0, 0), window=self.scrollable_metrics_frame, anchor="nw")
        canvas.configure(yscrollcommand=scrollbar.set)

        canvas.pack(side="left", fill="both", expand=True)
        scrollbar.pack(side="right", fill="y")

        # Metrics will be added dynamically
        self.metric_labels = {}

    def setup_content_tab(self):
        # Content analysis results
        content_text_frame = ttk.Frame(self.content_frame)
        content_text_frame.pack(fill=tk.BOTH, expand=True, padx=10, pady=10)

        self.content_text = scrolledtext.ScrolledText(content_text_frame, wrap=tk.WORD, width=80, height=20)
        self.content_text.pack(fill=tk.BOTH, expand=True)
        self.content_text.config(state=tk.DISABLED)

    def setup_technical_tab(self):
        # Technical analysis results
        tech_text_frame = ttk.Frame(self.technical_frame)
        tech_text_frame.pack(fill=tk.BOTH, expand=True, padx=10, pady=10)

        self.tech_text = scrolledtext.ScrolledText(tech_text_frame, wrap=tk.WORD, width=80, height=20)
        self.tech_text.pack(fill=tk.BOTH, expand=True)
        self.tech_text.config(state=tk.DISABLED)

    def setup_results_tab(self):
        # Detailed results
        results_text_frame = ttk.Frame(self.results_frame)
        results_text_frame.pack(fill=tk.BOTH, expand=True, padx=10, pady=10)

        self.results_text = scrolledtext.ScrolledText(results_text_frame, wrap=tk.WORD, width=80, height=20)
        self.results_text.pack(fill=tk.BOTH, expand=True)
        self.results_text.config(state=tk.DISABLED)

    def start_analysis(self):
        url = self.url_var.get().strip()

        if not url or url == "https://":
            messagebox.showerror("Error", "Please enter a valid URL")
            return

        # Validate URL format
        try:
            result = urlparse(url)
            if not all([result.scheme, result.netloc]):
                messagebox.showerror("Error", "Please enter a valid URL with http:// or https://")
                return
        except ValueError:
            messagebox.showerror("Error", "Invalid URL format")
            return

        # Disable button and start progress bar
        self.analyze_btn.config(state=tk.DISABLED)
        self.progress.start(10)

        # Run analysis in a separate thread to prevent the GUI from freezing
        thread = threading.Thread(target=self.analyze_seo, args=(url,))
        thread.daemon = True
        thread.start()

    def analyze_seo(self, url):
        try:
            # Initialize results dictionary
            self.results = {
                'url': url,
                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                'overview': {},
                'content': {},
                'technical': {},
                'issues': []
            }

            # Fetch the webpage
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
            }
            response = requests.get(url, headers=headers, timeout=10)
            response.raise_for_status()

            soup = BeautifulSoup(response.content, 'html.parser')

            # Perform various SEO analyses
            self.analyze_basic_seo(soup, response)
            self.analyze_content(soup)
            self.analyze_technical(soup, response)
            self.calculate_seo_score()

            # Update GUI in main thread
            self.root.after(0, self.display_results)
        except requests.RequestException as e:
            # Build the message now: the exception variable is cleared when the
            # except block exits, so a plain closure over `e` would fail later.
            message = f"Failed to fetch website: {e}"
            self.root.after(0, lambda msg=message: messagebox.showerror("Error", msg))
        except Exception as e:
            message = f"An error occurred: {e}"
            self.root.after(0, lambda msg=message: messagebox.showerror("Error", msg))
        finally:
            self.root.after(0, self.analysis_complete)

    def analyze_basic_seo(self, soup, response):
        # Title analysis (get_text() also copes with a <title> that contains nested markup)
        title_tag = soup.find('title')
        title = title_tag.get_text().strip() if title_tag else "Not found"
        title_length = len(title) if title_tag else 0

        self.results['overview']['title'] = title
        self.results['overview']['title_length'] = title_length
        self.results['overview']['title_status'] = "Good" if 50 <= title_length <= 60 else "Needs improvement"

        # Meta description analysis
        meta_desc = soup.find('meta', attrs={'name': 'description'})
        meta_content = meta_desc.get('content', '').strip() if meta_desc else "Not found"
        meta_length = len(meta_content) if meta_desc else 0

        self.results['overview']['meta_description'] = meta_content
        self.results['overview']['meta_length'] = meta_length
        self.results['overview']['meta_status'] = "Good" if 120 <= meta_length <= 160 else "Needs improvement"

        # Heading analysis
        headings = {'h1': [], 'h2': [], 'h3': []}
        for tag in ['h1', 'h2', 'h3']:
            elements = soup.find_all(tag)
            headings[tag] = [elem.get_text().strip() for elem in elements]

        self.results['overview']['headings'] = headings
        self.results['overview']['h1_count'] = len(headings['h1'])

        # Image analysis
        images = soup.find_all('img')
        images_with_alt = [img for img in images if img.get('alt')]

        self.results['overview']['total_images'] = len(images)
        self.results['overview']['images_with_alt'] = len(images_with_alt)
        self.results['overview']['alt_text_percentage'] = (len(images_with_alt) / len(images) * 100) if images else 0

        # Response time
        self.results['overview']['response_time'] = response.elapsed.total_seconds()

    def analyze_content(self, soup):
        # Text content analysis
        text = soup.get_text()
        words = re.findall(r'\w+', text)
        word_count = len(words)

        self.results['content']['word_count'] = word_count
        self.results['content']['content_status'] = "Good" if word_count >= 300 else "Too short"

        # Keyword density (basic)
        if word_count > 0:
            word_freq = {}
            for word in words:
                word_lower = word.lower()
                if len(word_lower) > 3:  # Only consider words longer than 3 characters
                    word_freq[word_lower] = word_freq.get(word_lower, 0) + 1

            # Get top 10 words
            top_words = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)[:10]
            self.results['content']['top_keywords'] = top_words

        # Readability score (simplified)
        sentences = re.split(r'[.!?]+', text)
        sentence_count = len([s for s in sentences if len(s.strip()) > 0])
        avg_sentence_length = word_count / sentence_count if sentence_count > 0 else 0

        self.results['content']['sentence_count'] = sentence_count
        self.results['content']['avg_sentence_length'] = avg_sentence_length
        self.results['content']['readability'] = "Good" if avg_sentence_length <= 20 else "Could be improved"

    def analyze_technical(self, soup, response):
        # Mobile responsiveness (basic check)
        viewport = soup.find('meta', attrs={'name': 'viewport'})
        self.results['technical']['viewport'] = "Present" if viewport else "Missing"

        # URL structure
        url = self.results['url']
        self.results['technical']['url_length'] = len(url)
        self.results['technical']['has_https'] = url.startswith('https')

        # Internal links analysis
        links = soup.find_all('a', href=True)
        internal_links = [link for link in links if link['href'].startswith('/') or url in link['href']]
        external_links = [link for link in links if link not in internal_links]

        self.results['technical']['total_links'] = len(links)
        self.results['technical']['internal_links'] = len(internal_links)
        self.results['technical']['external_links'] = len(external_links)

    def calculate_seo_score(self):
        score = 100  # Start with perfect score

        # Deduct points based on issues
        if self.results['overview']['title_length'] < 50 or self.results['overview']['title_length'] > 60:
            score -= 10
            self.results['issues'].append("Title length should be between 50-60 characters")

        if self.results['overview']['meta_length'] < 120 or self.results['overview']['meta_length'] > 160:
            score -= 10
            self.results['issues'].append("Meta description should be between 120-160 characters")

        if self.results['overview']['h1_count'] == 0:
            score -= 15
            self.results['issues'].append("No H1 tag found")
        elif self.results['overview']['h1_count'] > 1:
            score -= 5
            self.results['issues'].append("Multiple H1 tags found")

        if self.results['overview']['alt_text_percentage'] < 80:
            score -= 10
            self.results['issues'].append("Add alt text to images")

        if self.results['content']['word_count'] < 300:
            score -= 15
            self.results['issues'].append("Content is too short (aim for 300+ words)")

        if not self.results['technical']['has_https']:
            score -= 20
            self.results['issues'].append("Website should use HTTPS")

        if self.results['technical']['viewport'] == "Missing":
            score -= 10
            self.results['issues'].append("Viewport meta tag is missing")

        # Ensure score is between 0-100
        score = max(0, min(100, score))
        self.results['overview']['seo_score'] = score

    def display_results(self):
        # Update overview tab
        self.update_overview_tab()

        # Update content tab
        self.update_content_tab()

        # Update technical tab
        self.update_technical_tab()

        # Update detailed results tab
        self.update_results_tab()

    def update_overview_tab(self):
        # Clear previous metrics
        for widget in self.scrollable_metrics_frame.winfo_children():
            widget.destroy()

        # Update SEO score
        score = self.results['overview']['seo_score']
        color = 'green' if score >= 80 else 'orange' if score >= 60 else 'red'
        self.score_label.config(text=f"{score}/100", foreground=color)

        # Add metrics
        metrics = [
            ("Website URL", self.results['url']),
            ("Analysis Date", self.results['timestamp']),
            ("", ""),  # Separator
            ("Page Title", self.results['overview']['title']),
            ("Title Length", f"{self.results['overview']['title_length']} characters"),
            ("Title Status", self.results['overview']['title_status']),
            ("", ""),
            ("Meta Description", self.results['overview']['meta_description'][:100] + "..." if len(self.results['overview']['meta_description']) > 100 else self.results['overview']['meta_description']),
            ("Meta Length", f"{self.results['overview']['meta_length']} characters"),
            ("Meta Status", self.results['overview']['meta_status']),
            ("", ""),
            ("H1 Count", self.results['overview']['h1_count']),
            ("Total Images", self.results['overview']['total_images']),
            ("Images with Alt Text", f"{self.results['overview']['images_with_alt']} ({self.results['overview']['alt_text_percentage']:.1f}%)"),
            ("", ""),
            ("Response Time", f"{self.results['overview']['response_time']:.2f} seconds"),
        ]

        for label, value in metrics:
            if label == "" and value == "":
                # Add separator
                ttk.Separator(self.scrollable_metrics_frame, orient='horizontal').pack(fill=tk.X, pady=5)
            else:
                metric_frame = ttk.Frame(self.scrollable_metrics_frame)
                metric_frame.pack(fill=tk.X, pady=2)
                ttk.Label(metric_frame, text=label + ":", style='Header.TLabel').pack(side=tk.LEFT)
                ttk.Label(metric_frame, text=value, style='Normal.TLabel').pack(side=tk.RIGHT)

    def update_content_tab(self):
        self.content_text.config(state=tk.NORMAL)
        self.content_text.delete(1.0, tk.END)

        content = f"""CONTENT ANALYSIS REPORT
=======================

Word Count: {self.results['content']['word_count']} words
Status: {self.results['content']['content_status']}

Readability Analysis:
- Sentences: {self.results['content']['sentence_count']}
- Average sentence length: {self.results['content']['avg_sentence_length']:.1f} words
- Readability: {self.results['content']['readability']}

Top Keywords (by frequency):
"""

        if 'top_keywords' in self.results['content']:
            for word, freq in self.results['content']['top_keywords']:
                content += f"- {word}: {freq} occurrences\n"

        self.content_text.insert(1.0, content)
        self.content_text.config(state=tk.DISABLED)

    def update_technical_tab(self):
        self.tech_text.config(state=tk.NORMAL)
        self.tech_text.delete(1.0, tk.END)

        technical = f"""TECHNICAL SEO ANALYSIS
=======================

Mobile Optimization:
- Viewport meta tag: {self.results['technical']['viewport']}

URL Structure:
- URL Length: {self.results['technical']['url_length']} characters
- HTTPS: {'Yes' if self.results['technical']['has_https'] else 'No'}

Link Analysis:
- Total Links: {self.results['technical']['total_links']}
- Internal Links: {self.results['technical']['internal_links']}
- External Links: {self.results['technical']['external_links']}

RECOMMENDATIONS:
"""

        for issue in self.results['issues']:
            if any(keyword in issue.lower() for keyword in ['https', 'viewport', 'link']):
                technical += f"• {issue}\n"

        self.tech_text.insert(1.0, technical)
        self.tech_text.config(state=tk.DISABLED)

    def update_results_tab(self):
        self.results_text.config(state=tk.NORMAL)
        self.results_text.delete(1.0, tk.END)

        # Convert results to JSON format for display
        results_json = json.dumps(self.results, indent=2, ensure_ascii=False)
        self.results_text.insert(1.0, results_json)
        self.results_text.config(state=tk.DISABLED)

    def analysis_complete(self):
        self.progress.stop()
        self.analyze_btn.config(state=tk.NORMAL)


def main():
    root = tk.Tk()
    app = SEOAnalyzer(root)
    root.mainloop()


if __name__ == "__main__":
    main()
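
# A quick way to try the tool (the filename seo_analyzer.py is an assumption;
# use whatever name you saved the script under):
#   python seo_analyzer.py
# Then enter a full URL such as https://example.com and click "Analyze SEO".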