With AI and automation taking the world by storm, it is now essential for the average person to build a basic skill set. Python is the perfect language to start with: it is popular for being lightweight, having easy-to-read syntax, and coming packed with libraries that make automation painless.
In this post, we will explore five beginner-level Python mini-projects that you can use right away to improve your workflow.
1. 🗂 Auto-Organize Your Downloads Folder
Problem: Cluttered downloads folder with files everywhere
Solution: Automatically sort files into categorized folders
import shutil
from pathlib import Path

# Map destination folder names to the file extensions they should collect.
FILE_TYPES = {
    "Images": [".jpg", ".jpeg", ".png", ".gif", ".svg"],
    "Documents": [".pdf", ".docx", ".txt", ".pptx", ".xlsx"],
    "Archives": [".zip", ".rar", ".7z", ".tar"],
    "Videos": [".mp4", ".mov", ".avi", ".mkv"],
    "Audio": [".mp3", ".wav", ".flac"],
    "Code": [".py", ".js", ".html", ".css", ".json"],
}


def main(downloads_path=None):
    """Sort every file in *downloads_path* into a category sub-folder.

    Files whose extension matches no category go into "Other".  Defaults
    to the user's Downloads folder when no path is given, so running the
    script directly behaves exactly as before; importing it no longer
    moves files as a side effect.
    """
    downloads_path = Path(downloads_path) if downloads_path else Path.home() / "Downloads"
    # Snapshot the listing first: we create sub-folders inside the same
    # directory while iterating, and a live iterator could pick them up.
    for file in list(downloads_path.iterdir()):
        if not file.is_file():
            continue
        # First category whose extension list matches; fall back to "Other".
        folder = next(
            (name for name, exts in FILE_TYPES.items() if file.suffix.lower() in exts),
            "Other",
        )
        target_folder = downloads_path / folder
        target_folder.mkdir(exist_ok=True)
        shutil.move(str(file), str(target_folder / file.name))
        if folder == "Other":
            print(f"📁 Moved {file.name} to Other/")
        else:
            print(f"✅ Moved {file.name} to {folder}/")


if __name__ == "__main__":
    main()
🎯 How to use:
- Save as organize_downloads.py
- Run with python organize_downloads.py
- Schedule to run daily with Task Scheduler/cron
2. ⏰ Daily Task Reminder in Terminal
Problem: Forgetting daily priorities
Solution: Get your task list automatically displayed when opening terminal
python
from datetime import datetime

# The daily checklist, in the order it should be tackled.
TASKS = [
    "🌅 Review yesterday's notes and plan today",
    "📧 Check and respond to urgent emails (15 min max)",
    "🎯 Work on top priority project (90 min focus)",
    "💧 Hydration break + stretch",
    "📊 Afternoon review and plan tomorrow",
]


def main(tasks=None):
    """Print today's numbered task list under a dated header.

    Defaults to the module-level TASKS list.  Returns the rendered text
    so callers and tests can reuse it (the original printed the same
    output but returned nothing).
    """
    tasks = TASKS if tasks is None else tasks
    bar = "=" * 50
    lines = [
        "",  # leading blank line, as in the original banner
        bar,
        f"📅 TODAY'S PLAN - {datetime.today().strftime('%A, %B %d, %Y')}",
        bar,
    ]
    lines += [f"{i}. {task}" for i, task in enumerate(tasks, 1)]
    lines += ["\n💡 Tip: You've got this! One task at a time.", bar]
    plan = "\n".join(lines)
    print(plan)
    return plan


if __name__ == "__main__":
    main()
🎯 How to use:
- Windows: Add to PowerShell profile
- Mac/Linux: Add to ~/.bashrc or ~/.zshrc
- VS Code: Add to terminal startup commands
3. 🧠 Quick Notes to Markdown
Problem: Ideas lost because note-taking is cumbersome
Solution: One-command note capture with automatic formatting
python
from datetime import datetime
from pathlib import Path


def capture_lines():
    """Read note lines from stdin until a blank line follows content, or EOF.

    The original version required two consecutive blank lines (three
    Enter presses) despite prompting "press Enter twice", and its
    `lines[:-1]` slice silently dropped the last line of real content
    when input ended with EOF instead of a blank line.
    """
    lines = []
    while True:
        try:
            line = input()
        except EOFError:
            break
        if line == "" and lines:
            # A blank line after at least one line of content ends the note.
            break
        lines.append(line)
    return lines


def save_note(note_content, notes_dir):
    """Write *note_content* to a timestamped markdown file in *notes_dir*.

    Creates *notes_dir* if needed and returns the Path of the new file.
    """
    notes_dir = Path(notes_dir)
    notes_dir.mkdir(exist_ok=True)
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    filename = notes_dir / f"note_{timestamp}.md"
    with open(filename, "w", encoding="utf-8") as f:
        f.write(f"# Note - {datetime.now().strftime('%A, %B %d, %Y at %H:%M')}\n\n")
        f.write(note_content)
        f.write("\n\n---\n*Captured automatically*")
    return filename


def main():
    """Interactive entry point: prompt for, capture, and save a quick note."""
    print("📝 Quick Note Capture")
    print("Type your note (press Enter twice to finish):")
    note_content = "\n".join(capture_lines())
    if note_content.strip():
        filename = save_note(note_content, Path.home() / "QuickNotes")
        # Original printed a literal "(unknown)" here instead of the path.
        print(f"✅ Note saved to: {filename}")
    else:
        print("❌ No content entered. Note not saved.")


if __name__ == "__main__":
    main()
🎯 How to use:
- Save as quick_note.py
- Create alias: alias note="python /path/to/quick_note.py"
- Run note anytime to capture ideas
4. 📊 Instant CSV to Summary
Problem: Spending too much time opening spreadsheets for quick insights
Solution: Command-line data summary in seconds
python
import pandas as pd
import numpy as np


def analyze_csv(filename):
    """Print a quick console summary of the CSV file at *filename*.

    Reports shape, column names, per-column dtype and cardinality, a
    ``describe()`` of the numeric columns, and missing-value counts.
    All failures are caught and reported on stdout rather than raised,
    so the CLI never crashes on a bad path or malformed file.
    """
    try:
        df = pd.read_csv(filename)

        print("\n" + "=" * 60)
        # Original printed a literal "(unknown)" instead of the filename.
        print(f"📊 DATA ANALYSIS: {filename}")
        print("=" * 60)

        # Basic info
        print(f"📏 Shape: {df.shape[0]} rows × {df.shape[1]} columns")
        print(f"🔑 Columns: {', '.join(df.columns)}")

        # Per-column dtype and distinct-value count
        print("\n📋 Data Types:")
        for col in df.columns:
            dtype = str(df[col].dtype)
            unique_count = df[col].nunique()
            print(f" • {col}: {dtype} ({unique_count} unique values)")

        # Numerical summary (only when numeric columns exist)
        numeric_cols = df.select_dtypes(include=[np.number]).columns
        if not numeric_cols.empty:
            print(f"\n🧮 Numerical Summary:")
            print(df[numeric_cols].describe().round(2))

        # Missing values, with per-column percentages
        missing = df.isnull().sum()
        if missing.sum() > 0:
            print(f"\n⚠️ Missing Values:")
            for col, count in missing.items():
                if count > 0:
                    print(f" • {col}: {count} missing ({count/len(df)*100:.1f}%)")
        else:
            print(f"\n✅ No missing values found")

        print("=" * 60)
    except FileNotFoundError:
        # Original printed a literal "(unknown)" instead of the filename.
        print(f"❌ File '{filename}' not found!")
    except pd.errors.EmptyDataError:
        print("❌ File is empty!")
    except Exception as e:
        # Catch-all so an unexpected parse problem never crashes the CLI.
        print(f"❌ Error reading file: {e}")


# Main execution
if __name__ == "__main__":
    filename = input("Enter CSV filename (or path): ").strip()
    analyze_csv(filename)
🎯 How to use:
bash
python csv_analyzer.py
Enter CSV filename: data/sales.csv
5. 🌐 Fast Website Status Checker
Problem: Manually checking if websites/services are online
Solution: Bulk status monitoring with one command
python
import requests
import time
from datetime import datetime

# Sites polled by main(); edit this list for your own services.
WEBSITES = [
    "https://google.com",
    "https://github.com",
    "https://stackoverflow.com",
    "https://docs.python.org",
    "https://your-app.com",
    "https://api.yourservice.com",
]


def check_website(url, timeout=5):
    """Check if a website is accessible.

    Returns a ``(status_label, status_code, response_time_ms)`` tuple;
    the last two are the string "N/A" when no response was obtained.
    """
    try:
        start_time = time.time()
        # A browser-like User-Agent avoids naive bot blocking on some sites.
        response = requests.get(url, timeout=timeout, headers={
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        })
        response_time = round((time.time() - start_time) * 1000, 2)
        if response.status_code == 200:
            return "✅ UP", response.status_code, response_time
        return "⚠️ ISSUE", response.status_code, response_time
    except requests.exceptions.Timeout:
        return "⏰ TIMEOUT", "N/A", timeout * 1000
    except requests.exceptions.ConnectionError:
        return "❌ DOWN", "N/A", "N/A"
    except Exception as e:
        return "🚫 ERROR", str(e), "N/A"


def main(websites=None, log_file="website_status.log"):
    """Poll each site, print a status report, and append a summary log line.

    Wrapping the monitoring loop in main() means importing this module
    (as master_automation.py does via ``status_checker.main()``) no
    longer fires network requests as a side effect.
    """
    websites = WEBSITES if websites is None else websites

    print(f"\n🌐 WEBSITE STATUS CHECK - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print("=" * 70)

    results = []
    for url in websites:
        status, code, response_time = check_website(url)
        results.append((url, status, code, response_time))
        print(f"{status} {url}")
        print(f" Status: {code} | Response Time: {response_time}ms")
        time.sleep(1)  # Be nice to servers

    print("=" * 70)

    # Summary
    up_count = sum(1 for _, status, _, _ in results if status == "✅ UP")
    print(f"📈 Summary: {up_count}/{len(websites)} websites operational")

    # Append a one-line history record for trend-watching.
    with open(log_file, "a") as f:
        f.write(f"\n{datetime.now().strftime('%Y-%m-%d %H:%M')} - {up_count}/{len(websites)} up\n")
    print(f"📝 Log saved to: {log_file}")


if __name__ == "__main__":
    main()
🎯 How to use:
- Customize the websites list with your URLs
- Run with python status_checker.py
- Schedule for automated monitoring
🚀 Taking It Further
Automate Execution:
bash
# Windows Task Scheduler
# Create daily task for organize_downloads.py
# Mac/Linux crontab (run every day at 9 AM)
0 9 * * * /usr/bin/python3 /path/to/organize_downloads.py
# Monitor websites every hour
0 * * * * /usr/bin/python3 /path/to/status_checker.py
Combine Scripts:
Create a master script that runs all automations:
python
# master_automation.py
import organize_downloads
import status_checker
def main():
print("🚀 Running daily automations...")
organize_downloads.main()
status_checker.main()
print("✅ All automations completed!")
if __name__ == "__main__":
main()
💡 Final Thoughts
These scripts demonstrate how a few lines of Python can save hours of manual work each week. The real power comes from:
- Customization: Adapt them to your specific needs
- Integration: Combine them into workflows
- Scheduling: Make them run automatically
- Sharing: Help your team work smarter
Start small: Pick one script that solves your most annoying repetitive task. Run it manually for a week, then automate it. Build from there!
Which automation will you implement first? Share your experience or suggest other useful scripts in the comments below!