<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:content="http://purl.org/rss/1.0/modules/content/">
<channel>
  <title>Kolmogorov - Articles</title>
  <link>https://kolm.ai/articles</link>
  <description>Engineering essays from Kolmogorov: AI compilers, verified inference, on-device HIPAA, the .kolm artifact format, and recipe-driven speculative decoding.</description>
  <language>en-us</language>
  <lastBuildDate>Thu, 07 May 2026 12:00:00 GMT</lastBuildDate>
  <atom:link href="https://kolm.ai/articles/rss.xml" rel="self" type="application/rss+xml" />

  <item>
    <title>How to Compile GPT-5 Into a 4GB Local Model</title>
    <link>https://kolm.ai/articles/ai-compiler</link>
    <guid isPermaLink="true">https://kolm.ai/articles/ai-compiler</guid>
    <pubDate>Thu, 07 May 2026 12:00:00 GMT</pubDate>
    <description>An AI compiler is the missing build step between a frontier model API and a local artifact. This essay explains what it does, why it changes the unit economics of AI, and how to compile your first .kolm in five minutes.</description>
    <category>AI compiler</category>
    <category>distillation</category>
    <category>on-device</category>
  </item>

  <item>
    <title>K-sample Verified Inference: A Practical Alternative to ZK-ML</title>
    <link>https://kolm.ai/articles/k-sample-verified-inference</link>
    <guid isPermaLink="true">https://kolm.ai/articles/k-sample-verified-inference</guid>
    <pubDate>Thu, 07 May 2026 12:00:00 GMT</pubDate>
    <description>K-sample verified inference produces cryptographically auditable model outputs without the cost of zero-knowledge proofs. The same mechanism powers every label inside a .kolm artifact.</description>
    <category>verified inference</category>
    <category>audit</category>
    <category>receipts</category>
  </item>

  <item>
    <title>HIPAA-Aligned AI on a Laptop: Keep PHI Local by Design</title>
    <link>https://kolm.ai/articles/hipaa-on-device</link>
    <guid isPermaLink="true">https://kolm.ai/articles/hipaa-on-device</guid>
    <pubDate>Thu, 07 May 2026 12:00:00 GMT</pubDate>
    <description>A practical architecture playbook for keeping protected health information local when evaluating AI assistants for healthcare workflows.</description>
    <category>HIPAA</category>
    <category>clinical</category>
    <category>on-device</category>
  </item>

  <item>
    <title>The .kolm File Format: One Artifact, Seven Components, Signed</title>
    <link>https://kolm.ai/articles/kolm-file-format</link>
    <guid isPermaLink="true">https://kolm.ai/articles/kolm-file-format</guid>
    <pubDate>Thu, 07 May 2026 12:00:00 GMT</pubDate>
    <description>A field-by-field walkthrough of the .kolm file format: base model, LoRA adapter, recipe pack, recall index, verifier, held-out tests, manifest, signature. The container behind portable AI artifacts.</description>
    <category>.kolm</category>
    <category>spec</category>
    <category>format</category>
  </item>

  <item>
    <title>Speculative Decoding With Deterministic Drafts: Free LLM Inference</title>
    <link>https://kolm.ai/articles/speculative-decoding-recipes</link>
    <guid isPermaLink="true">https://kolm.ai/articles/speculative-decoding-recipes</guid>
    <pubDate>Thu, 07 May 2026 12:00:00 GMT</pubDate>
    <description>A draft model is the standard way to do speculative decoding. A deterministic recipe pack is faster, smaller, free at runtime, and verifiably correct. Here is how to use one to cut your local LLM bill to zero.</description>
    <category>speculative decoding</category>
    <category>drafts</category>
    <category>runtime</category>
  </item>
</channel>
</rss>
