Daily Case Law Alerts
Build a Python script that monitors specific legal topics and sends email alerts when new case law is published.
What you'll build
Staying current with case law developments is critical for legal professionals. Instead of manually checking court databases every morning, you can automate the process with a simple Python script that queries the Moonlit API for new decisions. This script runs a keyword search sorted by date, filters for documents published since yesterday, and sends an email digest with the new results. You can schedule it with cron (Linux/macOS) or Task Scheduler (Windows) to run every morning. The pattern is simple enough to extend: swap in semantic search for broader monitoring, add multiple queries for different practice areas, or push alerts to Slack or Microsoft Teams instead of email.
Architecture
┌─────────┐    ┌────────────────┐    ┌───────────────┐    ┌──────────────┐
│  Cron   │───▶│ Python Script  │───▶│  Moonlit API  │───▶│ Email Digest │
│ (daily) │    │    alert.py    │    │ keyword_search│    │   or Slack   │
└─────────┘    └────────────────┘    └───────────────┘    └──────────────┘
Prerequisites
- A Moonlit API key
- Python 3.8+ installed
- requests library (pip install requests)
Step-by-step
Define your monitoring topics
Create a configuration list of topics you want to monitor. Each topic has a name, a search query (using Boolean operators for precision), and optional jurisdiction and source filters.
# Standard-library imports: os for reading credentials from the
# environment, datetime/timedelta for building the date window.
import os
from datetime import datetime, timedelta

# The key comes from the environment so it never lands in source control.
API_KEY = os.environ["MOONLIT_API_KEY"]
BASE_URL = "https://api.moonlit.ai/v1.1"

# Topics to monitor. Each entry has a display name, a Boolean search
# query, and optional "jurisdictions" / "sources" filters that are passed
# straight through to the search payload.
TOPICS = [
    {
        "name": "GDPR Enforcement",
        "query": "GDPR AND (fine OR penalty OR enforcement)",
        "jurisdictions": ["European Union", "Netherlands"],
    },
    {
        "name": "AI Regulation",
        "query": "\"artificial intelligence\" OR \"AI Act\" OR \"algorithmic decision\"",
        "jurisdictions": ["European Union"],
    },
    {
        "name": "Employment Dismissal",
        # NOTE(review): the multi-word terms below are unquoted -- confirm
        # the query grammar treats "kennelijk onredelijk" as a phrase.
        "query": "ontslag AND (kennelijk onredelijk OR transitievergoeding)",
        "jurisdictions": ["Netherlands"],
        # Restrict to these courts only.
        "sources": ["Hoge Raad", "Gerechtshof Amsterdam"],
    },
]

Search for new case law
For each topic, query the keyword search endpoint with a date filter for yesterday. Sort by date descending to get the most recent results first.
import requests
def search_new_cases(topic: dict) -> list:
yesterday = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
today = datetime.now().strftime("%Y-%m-%d")
payload = {
"query": topic["query"],
"jurisdictions": topic.get("jurisdictions", []),
"sources": topic.get("sources", []),
"documentTypes": ["case_law"],
"from_date": yesterday,
"until_date": today,
"sort_type": 1, # date descending
"num_results": 20,
}
response = requests.post(
f"{BASE_URL}/search/keyword_search",
headers={
"Ocp-Apim-Subscription-Key": API_KEY,
"Content-Type": "application/json",
},
json=payload,
)
response.raise_for_status()
return response.json()["result"]["results"]

Format and send the alert
Collect results from all topics into a digest and send it via email using Python's built-in smtplib. You can swap this for a Slack webhook, Teams connector, or any notification service.
import smtplib
from email.mime.text import MIMEText

def build_digest(all_results: dict) -> str:
    """Render a plain-text digest of new cases, grouped by topic.

    Args:
        all_results: mapping of topic name -> list of case dicts as
            returned by the search endpoint.

    Returns:
        A newline-joined string suitable as an email body.
    """
    lines = ["Daily Legal Alert Digest", "=" * 40, ""]
    for topic_name, cases in all_results.items():
        lines.append(f"## {topic_name} ({len(cases)} new)")
        lines.append("")
        for case in cases:
            lines.append(f" {case['identifier']}")
            lines.append(f" {case['title']}")
            lines.append(f" {case['source']} -- {case['date']}")
            # `summary` may be missing OR present-but-None; `or` covers
            # both so the slice below never hits a NoneType.
            summary = case.get("summary") or "No summary available."
            lines.append(f" {summary[:200]}")
            lines.append("")
        lines.append("-" * 40)
    return "\n".join(lines)
def send_email(digest: str):
msg = MIMEText(digest)
msg["Subject"] = f"Legal Alert -- {datetime.now().strftime('%Y-%m-%d')}"
msg["From"] = "alerts@yourfirm.com"
msg["To"] = "team@yourfirm.com"
with smtplib.SMTP("smtp.yourfirm.com", 587) as server:
server.starttls()
server.login("alerts@yourfirm.com", os.environ["SMTP_PASSWORD"])
server.send_message(msg)

Schedule the script
Add a cron job to run the script every weekday morning at 7:00 AM. The script collects results for all topics and sends a single digest email.
# Add to crontab (crontab -e)
0 7 * * 1-5 cd /path/to/project && /usr/bin/python3 alert.py

Complete Code
#!/usr/bin/env python3
"""Daily legal alert script using Moonlit API."""
import os
import smtplib
from datetime import datetime, timedelta
from email.mime.text import MIMEText
import requests
# Credentials and endpoint. The key is read from the environment so it
# never lives in source control; BASE_URL pins API version v1.1.
API_KEY = os.environ["MOONLIT_API_KEY"]
BASE_URL = "https://api.moonlit.ai/v1.1"

# Topics to monitor. Each entry has a display name, a Boolean search
# query, and optional "jurisdictions" / "sources" filters that are passed
# straight through to the search payload.
TOPICS = [
    {
        "name": "GDPR Enforcement",
        "query": "GDPR AND (fine OR penalty OR enforcement)",
        "jurisdictions": ["European Union", "Netherlands"],
    },
    {
        "name": "AI Regulation",
        "query": "\"artificial intelligence\" OR \"AI Act\" OR \"algorithmic decision\"",
        "jurisdictions": ["European Union"],
    },
    {
        "name": "Employment Dismissal",
        # NOTE(review): the multi-word terms below are unquoted -- confirm
        # the query grammar treats "kennelijk onredelijk" as a phrase.
        "query": "ontslag AND (kennelijk onredelijk OR transitievergoeding)",
        "jurisdictions": ["Netherlands"],
        # Restrict to these courts only.
        "sources": ["Hoge Raad", "Gerechtshof Amsterdam"],
    },
]
def search_new_cases(topic: dict) -> list:
    """Query the keyword-search endpoint for a topic's new case law.

    Searches from yesterday through today so a daily run catches anything
    published since the previous morning.

    Args:
        topic: dict with a "query" string and optional "jurisdictions" /
            "sources" lists (see TOPICS).

    Returns:
        The list of result dicts from the API response.

    Raises:
        requests.HTTPError: if the API returns a non-2xx status.
        requests.Timeout: if the API does not answer within 30 seconds.
    """
    yesterday = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
    today = datetime.now().strftime("%Y-%m-%d")
    payload = {
        "query": topic["query"],
        "jurisdictions": topic.get("jurisdictions", []),
        "sources": topic.get("sources", []),
        "documentTypes": ["case_law"],
        "from_date": yesterday,
        "until_date": today,
        "sort_type": 1,  # date descending -- newest decisions first
        "num_results": 20,
    }
    response = requests.post(
        f"{BASE_URL}/search/keyword_search",
        headers={
            "Ocp-Apim-Subscription-Key": API_KEY,
            "Content-Type": "application/json",
        },
        json=payload,
        # Without a timeout a stalled connection hangs the cron job
        # forever; fail fast so the next scheduled run can recover.
        timeout=30,
    )
    response.raise_for_status()
    # Body is assumed to be {"result": {"results": [...]}} -- matches the
    # API version pinned in BASE_URL; verify if the version changes.
    return response.json()["result"]["results"]
def build_digest(all_results: dict) -> str:
    """Render the plain-text digest, grouped by topic.

    Args:
        all_results: mapping of topic name -> list of case dicts as
            returned by search_new_cases.

    Returns:
        A newline-joined string suitable as an email body.
    """
    lines = ["Daily Legal Alert Digest", "=" * 40, ""]
    total = sum(len(v) for v in all_results.values())
    lines.append(f"Total new decisions: {total}")
    lines.append(f"Date: {datetime.now().strftime('%A, %d %B %Y')}")
    lines.append("")
    for topic_name, cases in all_results.items():
        lines.append(f"## {topic_name} ({len(cases)} new)")
        lines.append("")
        if not cases:
            lines.append(" No new decisions.")
            lines.append("")
            continue
        for case in cases:
            lines.append(f" {case['identifier']}")
            lines.append(f" {case['title']}")
            lines.append(f" {case['source']} -- {case['date']}")
            # `summary` may be missing OR present-but-None; `or` covers
            # both so the slice below never hits a NoneType.
            summary = case.get("summary") or "No summary available."
            lines.append(f" {summary[:200]}")
            lines.append("")
        lines.append("-" * 40)
    return "\n".join(lines)
def send_email(digest: str):
    """Deliver the digest as a plain-text email over the firm's SMTP relay."""
    today = datetime.now().strftime('%Y-%m-%d')
    message = MIMEText(digest)
    message["From"] = "alerts@yourfirm.com"
    message["To"] = "team@yourfirm.com"
    message["Subject"] = f"Legal Alert -- {today}"
    # STARTTLS upgrade before authenticating, then a single send.
    server = smtplib.SMTP("smtp.yourfirm.com", 587)
    try:
        server.starttls()
        server.login("alerts@yourfirm.com", os.environ["SMTP_PASSWORD"])
        server.send_message(message)
    finally:
        server.quit()
def main():
    """Run every topic search, then send one digest if anything is new.

    Each topic is isolated behind its own error boundary so a single
    failing query (network hiccup, bad filter) cannot abort the whole
    daily run and suppress alerts for the remaining topics.
    """
    all_results = {}
    for topic in TOPICS:
        try:
            cases = search_new_cases(topic)
        except Exception as exc:  # top-level boundary: log and continue
            print(f"[{topic['name']}] search failed: {exc}")
            cases = []
        all_results[topic["name"]] = cases
        print(f"[{topic['name']}] {len(cases)} new decisions")
    digest = build_digest(all_results)
    if any(all_results.values()):
        send_email(digest)
        print("Alert email sent.")
    else:
        print("No new decisions -- no email sent.")

if __name__ == "__main__":
    main()