# Jupyter Notebook SerpApi web search

import requests
import os

# SerpApi key read from the environment; raises KeyError at import time
# if SERP_API is not set, so misconfiguration fails fast.
API_KEY = os.environ["SERP_API"]

def search_and_save(query):
    """Fetch Google results for *query* via SerpApi and save them to a text file.

    Each organic result is written as three lines (title, link, snippet)
    followed by a dashed separator. The file is created under
    ``./search_results`` with a name derived from the query.

    Parameters
    ----------
    query : str
        The search query to send to SerpApi.

    Returns
    -------
    str
        Path of the file the results were written to.

    Raises
    ------
    requests.HTTPError
        If SerpApi responds with an error status (e.g. bad API key).
    requests.Timeout
        If the request takes longer than 10 seconds.
    """
    url = "https://serpapi.com/search.json"

    params = {
        "q": query,
        "api_key": API_KEY
    }

    # timeout: never hang indefinitely on a stalled connection.
    # raise_for_status: surface HTTP errors (401 bad key, 429 rate limit)
    # instead of silently writing an empty results file.
    response = requests.get(url, params=params, timeout=10)
    response.raise_for_status()
    data = response.json()

    results = data.get("organic_results", [])

    # Create folder if it doesn't exist
    folder = "./search_results"
    os.makedirs(folder, exist_ok=True)

    # Create filename: spaces -> "-", dots dropped (original behavior),
    # then strip any remaining character that could escape the folder
    # (e.g. "/" or "\" in the query) or be illegal in a filename.
    safe_query = query.replace(" ", "-").replace(".", "")
    safe_query = "".join(c for c in safe_query if c.isalnum() or c in "-_")
    file_path = os.path.join(folder, f"{safe_query or 'query'}.txt")

    # Write results to file
    with open(file_path, "w", encoding="utf-8") as f:
        for r in results:
            title = r.get("title", "")
            link = r.get("link", "")
            snippet = r.get("snippet", "")

            f.write(f"{title}\n")
            f.write(f"{link}\n")
            f.write(f"{snippet}\n")
            f.write("-" * 50 + "\n")

    print(f"Saved results to {file_path}")
    return file_path

# Example usage — guarded so importing this module does not trigger
# a live network request against the SerpApi quota.
if __name__ == "__main__":
    search_and_save("Jupyter notebook tutorials.")