SCIOPS was a newsletter on politics, technology, memetics and magic

I wrote SCIOPS weekly(ish) for several years. I stopped writing it around the beginning of the 2020 pandemic, as my worldview started changing rapidly to keep up with events. I no longer stand by all the opinions in these letters, but I want to archive them here in case TinyLetter disappears one day.

The code below downloads the letters from the TinyLetter API and converts each one into a markdown file in the _posts folder.

import base64
import datetime
import os.path
from getpass import getpass

from bs4 import BeautifulSoup
from markdownify import markdownify as md
from slugify import slugify
from tqdm import tqdm
import requests
import tinyapi

# Log in to the TinyLetter API as the 'sciops' account (prompts for the password)
session = tinyapi.Session('sciops', getpass())
# Fetch every sent letter, newest first, including the rendered HTML content
messages = session.get_messages(order='sent_at desc', content=True)
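
Each entry in messages is a plain dict; the fields used below are 'subject', 'snippet', 'sent_at' (a Unix timestamp), and 'content' (which holds the rendered HTML). A quick way to peek at the most recent letter:

# Peek at the newest letter (messages come back newest first)
latest = messages[0]
print(latest['subject'])
print(datetime.datetime.fromtimestamp(latest['sent_at']))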
def download_images(soup):
    # Fetch each remote image and inline it as a base64 data URI,
    # so the archived post doesn't depend on external image hosting
    imgs = soup.find_all('img')
    for img in imgs:
        # Skip images with no src, or ones that are already inlined
        if not img.has_attr('src') or img['src'].startswith('data'):
            continue
        img_data = requests.get(img['src'])
        img_data_uri = 'data:{};base64,{}'.format(
            img_data.headers['Content-Type'],
            base64.b64encode(img_data.content).decode('utf-8'))
        img['src'] = img_data_uri
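
As a quick sanity check, download_images rewrites each img src in place to a data URI. The URL below is a placeholder (any reachable image URL would do), and the check assumes the fetch succeeds:

# Placeholder URL for illustration only
test_soup = BeautifulSoup('<img src="https://example.com/pic.png"/>', 'html.parser')
download_images(test_soup)
print(test_soup.img['src'].startswith('data:'))  # True once the image is inlined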
def get_frontmatter(title, desc):
    # Use the first sentence of the snippet as the description,
    # swapping double quotes for single quotes to keep the YAML valid
    short_desc = desc.split('.')[0].replace('"', "'")
    frontmatter = f'''---
title: "{title}"
description: "{short_desc}"
layout: post
toc: false
comments: false
search_exclude: false
categories: [sciops]
---

'''
    return frontmatter
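
For illustration, here is the frontmatter produced for a made-up subject line and snippet (the description is just the first sentence of the snippet, with double quotes swapped for single quotes):

print(get_frontmatter('SCIOPS 99.99: Example', 'A sample snippet. The rest is cut.'))
# ---
# title: "SCIOPS 99.99: Example"
# description: "A sample snippet"
# layout: post
# toc: false
# comments: false
# search_exclude: false
# categories: [sciops]
# ---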
def get_slug(title):
    # Slugify the subject line and drop its first three hyphen-separated tokens
    short_title = '-'.join(slugify(title).split('-')[3:])
    if len(short_title) < 1:
        # Fall back to a generic slug if nothing is left
        short_title = 'letter'
    return short_title
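
The slicing assumes subject lines start with something like "SCIOPS 99.99:", which slugifies into three leading tokens. A couple of made-up examples:

print(get_slug('SCIOPS 99.99: Example Title'))  # -> 'example-title'
print(get_slug('SCIOPS 99.99:'))                # -> 'letter' (fallback when nothing is left)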
def archive_message(message):
    title = message['subject']
    desc = message['snippet']
    # Name the file after the send date plus a slug of the subject line
    sent_date = datetime.datetime.fromtimestamp(message['sent_at'])
    sent_date_str = sent_date.strftime('%Y-%m-%d')
    filename = f'../_posts/sciops/{sent_date_str}-{get_slug(title)}.md'
#     if os.path.exists(filename):
#         return

    # Parse the HTML body and inline its images
    html_doc = message['content']['html']
    soup = BeautifulSoup(html_doc, 'html.parser')
    download_images(soup)
    message_body = soup.prettify()

    # Prepend the frontmatter and convert the HTML body to markdown
    frontmatter = get_frontmatter(title, desc)
    body = frontmatter + md(message_body)
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(body)
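
archive_message writes one markdown file per letter, so a single letter can be re-exported directly, for example the most recent one (they come back newest first):

# Re-archive just the newest letter
archive_message(messages[0])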
# Archive every letter, with a progress bar
for message in tqdm(messages):
    archive_message(message)
100%|██████████| 138/138 [00:35<00:00,  3.91it/s]