Skip to content

Instantly share code, notes, and snippets.

@benrudhart
Last active April 16, 2026 06:12
Show Gist options
  • Select an option

  • Save benrudhart/e6ef89d068e351acdd3bf895d036d4fb to your computer and use it in GitHub Desktop.

Select an option

Save benrudhart/e6ef89d068e351acdd3bf895d036d4fb to your computer and use it in GitHub Desktop.
ActorRunnable: Generic Swift protocol to batch actor operations into a single suspension point (eliminates interleaving race conditions)

ActorRunnable

A Swift protocol that batches multiple actor operations into a single suspension point — eliminating interleaving race conditions and reducing scheduling overhead.

Inspired by this Point-Free tweet about reducing await proliferation on actors.

The Problem

Each await on an actor is a suspension point. Between suspension points, other tasks can interleave and mutate actor state:

// 3 suspension points — state can change between each await
let token = await session.token        // ← another task can run here
let expires = await session.expiresAt  // ← and here
let active = await session.isRefreshing

The Solution

protocol ActorRunnable: Actor {
    func run<T: Sendable>(_ body: @Sendable (isolated Self) throws -> T) rethrows -> T
}

Conform any actor, then batch reads/writes into a single await:

actor SessionManager: ActorRunnable {
    var token: String?
    var expiresAt: Date?
    var isRefreshing = false
}

// 1 suspension point — atomic access, no interleaving
let snapshot = await session.run { iso in
    (iso.token, iso.expiresAt, iso.isRefreshing)
}

Performance

Benchmarked with ContinuousClock on Apple Silicon (Swift 6, Swift Testing):

Scenario Individual (10 awaits/iter) Batched (2 awaits/iter) Speedup
Sequential — 10k iterations 6.2 ms 4.1 ms 1.5×
Concurrent — 50 tasks × 200 iterations 25.2 ms 10.1 ms 2.5×

Under contention the speedup grows because each suspension point is a scheduling event where other tasks compete for the actor's executor.

The primary benefit is correctness, not speed: run guarantees atomic access to actor state within the closure. No interleaving, no race conditions.

Usage

  1. Add ActorRunnable.swift to your project
  2. Conform your actor: actor MyStore: ActorRunnable { ... }
  3. Use await store.run { iso in ... } instead of multiple await calls

Requires Swift 6.

/// Batches multiple synchronous operations on an actor into a single suspension point.
///
/// Conforming actors gain a generic ``run(_:)`` method that accepts a closure
/// receiving `isolated Self`. The closure body has synchronous access to all
/// actor-isolated state — only the outer `run` call requires `await`.
///
/// ```swift
/// actor Store: ActorRunnable {
///     var count = 0
///     var label = ""
/// }
///
/// // One suspension point, atomic access:
/// let snapshot = await store.run { iso in
///     iso.count += 1
///     iso.label = "count: \(iso.count)"
///     return (iso.count, iso.label)
/// }
/// ```
protocol ActorRunnable: Actor {
    /// Executes `body` on the actor's executor and returns the result.
    ///
    /// - Parameter body: A synchronous closure receiving `isolated Self`, giving it
    ///   direct access to actor-isolated state. Because the closure itself contains
    ///   no suspension points, no other task can interleave while it runs.
    /// - Returns: The value produced by `body`.
    /// - Throws: Rethrows whatever `body` throws.
    func run<T: Sendable>(_ body: @Sendable (isolated Self) throws -> T) rethrows -> T
}
extension ActorRunnable {
    /// Default implementation of ``run(_:)``.
    ///
    /// Invokes `operation` directly with the actor's isolated `self`, so the whole
    /// closure executes as one synchronous, uninterruptible unit on the actor's
    /// executor — callers pay exactly one suspension point for the entire batch.
    func run<T: Sendable>(_ operation: @Sendable (isolated Self) throws -> T) rethrows -> T {
        return try operation(self)
    }
}
import Testing
/// Performance comparison of many individual `await` calls versus one batched
/// `run` closure against the same actor. Timings are printed for inspection;
/// the expectations only require the batched form to be faster.
@Suite("ActorRunnable Performance")
struct ActorRunnablePerformanceTests {
    // MARK: - Test Actor

    /// Actor with individual accessors — each call is one suspension point from outside.
    actor ProfileStore: ActorRunnable {
        var name = ""
        var age = 0
        var email = ""
        var score = 0.0
        var isActive = false
        func setName(_ val: String) { name = val }
        func setAge(_ val: Int) { age = val }
        func setEmail(_ val: String) { email = val }
        func setScore(_ val: Double) { score = val }
        func setActive(_ val: Bool) { isActive = val }
        func getName() -> String { name }
        func getAge() -> Int { age }
        func getEmail() -> String { email }
        func getScore() -> Double { score }
        func getActive() -> Bool { isActive }
    }

    // MARK: - Sequential: individual awaits vs batched run

    /// Single task, 10k iterations: 10 awaits per iteration vs 2 `run` calls.
    @Test("batched run eliminates per-property suspension overhead")
    func sequentialComparison() async {
        let store = ProfileStore()
        let clock = ContinuousClock()
        let iterations = 10_000
        // Warm up the actor executor.
        await store.setName("warmup")
        _ = await store.getName()
        // --- 10 suspension points per iteration ---
        let individualTime = await clock.measure {
            for idx in 0..<iterations {
                await store.setName("Alice")
                await store.setAge(idx)
                await store.setEmail("alice@test.com")
                await store.setScore(Double(idx) * 0.5)
                await store.setActive(idx.isMultiple(of: 2))
                _ = await store.getName()
                _ = await store.getAge()
                _ = await store.getEmail()
                _ = await store.getScore()
                _ = await store.getActive()
            }
        }
        // --- 2 suspension points per iteration ---
        // One batched write (5 property sets) and one batched read (5-tuple snapshot).
        let batchedTime = await clock.measure {
            for idx in 0..<iterations {
                await store.run { iso in
                    iso.name = "Alice"
                    iso.age = idx
                    iso.email = "alice@test.com"
                    iso.score = Double(idx) * 0.5
                    iso.isActive = idx.isMultiple(of: 2)
                }
                _ = await store.run { iso in
                    (iso.name, iso.age, iso.email, iso.score, iso.isActive)
                }
            }
        }
        let speedup = nanoseconds(individualTime) / nanoseconds(batchedTime)
        print("""
        ┌───────────────────────────────────────────────────────┐
        │ Sequential · \(iterations) iterations │
        ├───────────────────────────────────────────────────────┤
        │ Individual (10 awaits/iter): \(format(individualTime)) │
        │ Batched (2 awaits/iter): \(format(batchedTime)) │
        │ Speedup: \(String(format: "%.1f", speedup))× │
        └───────────────────────────────────────────────────────┘
        """)
        // NOTE(review): wall-clock comparisons can be noisy on loaded machines;
        // the expectation is deliberately loose (just "faster", no fixed ratio).
        #expect(batchedTime < individualTime, "Batched run should be faster than individual awaits")
    }

    // MARK: - Concurrent: contention amplifies the difference

    /// 50 concurrent tasks hammering the same actor: each suspension point is a
    /// scheduling event where tasks compete for the executor, so batching helps more.
    @Test("batched run reduces contention under concurrent access")
    func concurrentComparison() async {
        let store = ProfileStore()
        let clock = ContinuousClock()
        let tasks = 50
        let iterationsPerTask = 200
        // --- 10 suspension points per iteration, N concurrent tasks ---
        let individualTime = await clock.measure {
            await withTaskGroup(of: Void.self) { group in
                for _ in 0..<tasks {
                    group.addTask {
                        for idx in 0..<iterationsPerTask {
                            await store.setName("Alice")
                            await store.setAge(idx)
                            await store.setEmail("alice@test.com")
                            await store.setScore(Double(idx) * 0.5)
                            await store.setActive(idx.isMultiple(of: 2))
                            _ = await store.getName()
                            _ = await store.getAge()
                            _ = await store.getEmail()
                            _ = await store.getScore()
                            _ = await store.getActive()
                        }
                    }
                }
            }
        }
        // --- 2 suspension points per iteration, N concurrent tasks ---
        let batchedTime = await clock.measure {
            await withTaskGroup(of: Void.self) { group in
                for _ in 0..<tasks {
                    group.addTask {
                        for idx in 0..<iterationsPerTask {
                            await store.run { iso in
                                iso.name = "Alice"
                                iso.age = idx
                                iso.email = "alice@test.com"
                                iso.score = Double(idx) * 0.5
                                iso.isActive = idx.isMultiple(of: 2)
                            }
                            _ = await store.run { iso in
                                (iso.name, iso.age, iso.email, iso.score, iso.isActive)
                            }
                        }
                    }
                }
            }
        }
        let speedup = nanoseconds(individualTime) / nanoseconds(batchedTime)
        print("""
        ┌───────────────────────────────────────────────────────┐
        │ Concurrent · \(tasks) tasks × \(iterationsPerTask) iterations │
        ├───────────────────────────────────────────────────────┤
        │ Individual (10 awaits/iter): \(format(individualTime)) │
        │ Batched (2 awaits/iter): \(format(batchedTime)) │
        │ Speedup: \(String(format: "%.1f", speedup))× │
        └───────────────────────────────────────────────────────┘
        """)
        // NOTE(review): same caveat as above — loose, direction-only expectation.
        #expect(batchedTime < individualTime, "Batched run should be faster under contention")
    }

    // MARK: - Helpers

    /// Converts a `Duration` to nanoseconds as a `Double`.
    /// `components` yields (seconds, attoseconds); 1 ns = 1e9 as, hence the divisor.
    private func nanoseconds(_ duration: Duration) -> Double {
        let (seconds, attoseconds) = duration.components
        return Double(seconds) * 1_000_000_000 + Double(attoseconds) / 1_000_000_000
    }

    /// Renders a duration for the report: seconds (2 decimals) at >= 1 s, else milliseconds.
    private func format(_ duration: Duration) -> String {
        let total = nanoseconds(duration)
        if total >= 1_000_000_000 {
            return String(format: "%.2f s", total / 1_000_000_000)
        } else {
            return String(format: "%.1f ms", total / 1_000_000)
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment