mgx

Python script to export Mailgun permanent failure logs

My initial approach was inspired by a Stack Overflow answer posted by wolfplusplus. Their script showed how to retrieve Mailgun logs in JSON format. I modified the script to filter for permanent failures, paginate through the Events API, and write the affected recipient addresses to a CSV file:

import os
import csv 
import requests
from datetime import datetime, timedelta
from email import utils

DAYS_TO_GET = 7
MAILGUN_API_KEY = os.environ.get('MAILGUN_API_KEY')
MAILGUN_SERVER = os.environ.get('MAILGUN_SERVER')
if not MAILGUN_API_KEY or not MAILGUN_SERVER:
    print("Set the environment variables MAILGUN_API_KEY and MAILGUN_SERVER")
    exit(1)

ITEMS_PER_PAGE = 300  # API is limited to 300

def get_logs(start_date, next_url=None):
    if next_url:
        print(f"Getting next batch of {ITEMS_PER_PAGE} from {next_url}...")
        response = requests.get(next_url, auth=("api", MAILGUN_API_KEY))
    else:
        url = 'https://api.mailgun.net/v3/{0}/events'.format(MAILGUN_SERVER)
        start_date_formatted = utils.format_datetime(start_date)  # Mailgun wants it in RFC 2822
        print(f"Getting first batch of {ITEMS_PER_PAGE} from {url} since {start_date_formatted}...")
        response = requests.get(
            url,
            auth=("api", MAILGUN_API_KEY),
            params={
                "begin": start_date_formatted,
                "ascending": "yes",
                "pretty": "yes",
                "limit": ITEMS_PER_PAGE,
                "event": "failed",
                "severity": "permanent"
            }
        )
    response.raise_for_status()
    return response.json()

start = datetime.now() - timedelta(days=DAYS_TO_GET)
email_ids = []
current_page = get_logs(start)

while current_page.get('items'):
    items = current_page.get('items')
    # Extract email IDs and add them to the list
    email_ids.extend([item.get('envelope', {}).get('targets', '') for item in items])
    print(f"Retrieved email IDs for a total of {len(email_ids)}")
    next_url = current_page.get('paging').get('next', None)
    current_page = get_logs(start, next_url=next_url)

# Save as CSV
file_out = f"mailgun-email-ids-{MAILGUN_SERVER}_{start.strftime('%Y-%m-%d')}_to_{datetime.now().strftime('%Y-%m-%d')}.csv"
print(f"Writing out {file_out}")

# Open the file using csv.writer and write the header and email IDs
with open(file_out, 'w', newline='') as file_out_handle:
    writer = csv.writer(file_out_handle)
    writer.writerow(['emails'])
    writer.writerows([[email] for email in email_ids])

print("Done.")

^ This is the basic script. An extended version stores the results elsewhere in a different format to provide a better view in Metabase.
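
For reference, here is a minimal sketch of what such an extension could look like, assuming the results are written to a SQLite database that Metabase reads from. The database path, table name, and column names below are illustrative assumptions, not the ones actually used.

import sqlite3
from datetime import datetime

def save_to_db(email_ids, db_path="mailgun_failures.db"):
    # Store the failed recipient addresses with a capture timestamp so that
    # failures can be charted over time. Table and column names are assumed.
    captured_at = datetime.now().isoformat()
    conn = sqlite3.connect(db_path)
    try:
        conn.execute(
            "CREATE TABLE IF NOT EXISTS permanent_failures ("
            "email TEXT NOT NULL, captured_at TEXT NOT NULL)"
        )
        conn.executemany(
            "INSERT INTO permanent_failures (email, captured_at) VALUES (?, ?)",
            [(email, captured_at) for email in email_ids],
        )
        conn.commit()
    finally:
        conn.close()

# Example usage, after the loop above has filled email_ids:
# save_to_db(email_ids)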