Skip to content

Instantly share code, notes, and snippets.

@mxvsh
Last active August 14, 2025 19:31
Show Gist options
  • Select an option

  • Save mxvsh/e6b1f795b43622387620fcb8e130ad31 to your computer and use it in GitHub Desktop.

Select an option

Save mxvsh/e6b1f795b43622387620fcb8e130ad31 to your computer and use it in GitHub Desktop.
macos-basic-hls
import SwiftUI
import AVFoundation
import Swifter
// MARK: - HLS Segmenter (fMP4 HLS via AVAssetWriter)
final class HLSSegmenter: NSObject, AVAssetWriterDelegate {
    /// One finished fMP4 media segment plus the playlist sequence number it
    /// was emitted with.
    struct Segment {
        let sequence: Int
        let data: Data
    }

    // `lock` guards every mutable property below: the AVAssetWriter delegate
    // fires on an AVFoundation-internal queue while the web server reads
    // `playlist` / `initSegment()` / `segment(sequence:)` from its own threads.
    private let lock = NSLock()
    private(set) var isRunning = false
    private(set) var initData: Data?
    private(set) var segments: [Segment] = []
    // Next media-segment number to hand out (init.mp4 doesn't count).
    private var nextSequenceNumber = 0

    private var writer: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private let width: Int
    private let height: Int
    private let fps: Int32

    /// - Parameters:
    ///   - width: Encoded frame width in pixels (H.264 requires an even value).
    ///   - height: Encoded frame height in pixels (H.264 requires an even value).
    ///   - fps: Expected source frame rate, used only as an encoder hint.
    init(width: Int, height: Int, fps: Int32 = 30) {
        self.width = width
        self.height = height
        self.fps = fps
    }

    /// Creates and starts an `AVAssetWriter` that emits ~1-second fMP4 HLS
    /// segments through the `AVAssetWriterDelegate` callback.
    /// - Parameter startTime: Source time of the first sample; also used as
    ///   the initial segment start time.
    /// - Throws: `AVAssetWriter` construction/start errors, or an `HLS`-domain
    ///   error when the video input cannot be attached.
    func start(at startTime: CMTime) throws {
        writer = try AVAssetWriter(contentType: .mpeg4Movie)
        writer.shouldOptimizeForNetworkUse = true
        if #available(macOS 12.0, *) {
            writer.outputFileTypeProfile = .mpeg4AppleHLS
            writer.preferredOutputSegmentInterval = CMTime(seconds: 1, preferredTimescale: 1)
        }
        writer.delegate = self
        let compression: [String: Any] = [
            AVVideoAverageBitRateKey: 4_000_000,
            AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
            // Encoder hint only; actual timing comes from the sample buffers.
            AVVideoExpectedSourceFrameRateKey: fps
        ]
        let settings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height,
            AVVideoCompressionPropertiesKey: compression
        ]
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
        videoInput.expectsMediaDataInRealTime = true
        guard writer.canAdd(videoInput) else { throw NSError(domain: "HLS", code: -1) }
        writer.add(videoInput)
        writer.initialSegmentStartTime = startTime
        guard writer.startWriting() else { throw writer.error ?? NSError(domain: "HLS", code: -2) }
        writer.startSession(atSourceTime: startTime)
        lock.lock()
        // Reset numbering so a restarted stream matches its fresh playlist
        // (MEDIA-SEQUENCE starts again at 0).
        nextSequenceNumber = 0
        initData = nil
        segments.removeAll()
        isRunning = true
        lock.unlock()
    }

    /// Feeds one captured video sample to the writer. Samples arriving while
    /// the input is busy (or after `stop`) are silently dropped — acceptable
    /// for a live stream.
    func append(_ sampleBuffer: CMSampleBuffer) {
        guard isRunning, videoInput.isReadyForMoreMediaData else { return }
        _ = videoInput.append(sampleBuffer)
    }

    /// Finishes the writer and calls `handler` once the last segment has been
    /// delivered (or immediately when the segmenter was never started).
    func stop(_ handler: @escaping () -> Void) {
        guard isRunning else { handler(); return }
        isRunning = false
        videoInput.markAsFinished()
        writer.finishWriting { handler() }
    }

    // MARK: - AVAssetWriterDelegate

    func assetWriter(_ writer: AVAssetWriter,
                     didOutputSegmentData segmentData: Data,
                     segmentType: AVAssetSegmentType,
                     segmentReport: AVAssetSegmentReport?) {
        lock.lock()
        defer { lock.unlock() }
        switch segmentType {
        case .initialization:
            initData = segmentData
        case .separable:
            let seqNum = nextSequenceNumber
            nextSequenceNumber += 1
            segments.append(.init(sequence: seqNum, data: segmentData))
            // Keep a sliding window of ~20 segments so memory stays bounded.
            if segments.count > 20 {
                segments.removeFirst(segments.count - 20)
            }
        default:
            break
        }
    }

    /// Live media playlist covering the newest (up to 6) buffered segments.
    var playlist: String {
        lock.lock()
        defer { lock.unlock() }
        // Always use exactly what's in `segments`
        let last = segments.suffix(6)
        guard let first = last.first else {
            return """
            #EXTM3U
            #EXT-X-VERSION:9
            #EXT-X-TARGETDURATION:2
            #EXT-X-MEDIA-SEQUENCE:0
            """
        }
        let mediaSeq = first.sequence
        var body = """
        #EXTM3U
        #EXT-X-VERSION:9
        #EXT-X-TARGETDURATION:2
        #EXT-X-MEDIA-SEQUENCE:\(mediaSeq)
        #EXT-X-MAP:URI="init.mp4"
        """
        for seg in last {
            body += """
            \n#EXTINF:1.0,
            files/sequence\(seg.sequence).m4s
            """
        }
        return body + "\n"
    }

    /// Thread-safe copy of the initialization segment (`init.mp4`), if emitted.
    func initSegment() -> Data? {
        lock.lock(); defer { lock.unlock() }
        return initData
    }

    /// Thread-safe lookup of a media segment still inside the sliding window.
    func segment(sequence: Int) -> Data? {
        lock.lock(); defer { lock.unlock() }
        return segments.first(where: { $0.sequence == sequence })?.data
    }
}
// MARK: - Screen Capture to CMSampleBuffer
/// Captures the main display via `AVCaptureScreenInput` and forwards each raw
/// frame to `onSample` on a private queue.
final class ScreenCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private let session = AVCaptureSession()
    private let queue = DispatchQueue(label: "ScreenCapture.queue")
    private let output = AVCaptureVideoDataOutput()

    /// Invoked on the capture queue for every frame delivered by the session.
    var onSample: ((CMSampleBuffer) -> Void)?

    /// Configures and starts the capture session.
    /// - Throws: A `ScreenCapture`-domain error when no screen input can be
    ///   created for the main display.
    func start() throws {
        guard let screenInput = AVCaptureScreenInput(displayID: CGMainDisplayID()) else {
            throw NSError(domain: "ScreenCapture", code: -1)
        }
        screenInput.minFrameDuration = CMTimeMake(value: 1, timescale: 30) // ~30fps
        session.beginConfiguration()
        if session.canAddInput(screenInput) {
            session.addInput(screenInput)
        }
        // Dropping late frames keeps latency low for a live stream.
        output.alwaysDiscardsLateVideoFrames = true
        output.setSampleBufferDelegate(self, queue: queue)
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        session.commitConfiguration()
        session.startRunning()
    }

    /// Stops frame delivery.
    func stop() {
        session.stopRunning()
    }

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        onSample?(sampleBuffer)
    }
}
// MARK: - Embedded Web Server (Swifter)
/// Embedded Swifter HTTP server exposing a player page, the HLS playlist,
/// the fMP4 init segment, and the media segments produced by `HLSSegmenter`.
final class HLSWebServer {
    private let server = HttpServer()
    private let port: in_port_t
    // Weak: the view model owns the segmenter; the server must not extend its
    // life (and handlers below must capture weakly for the same reason).
    private weak var segmenter: HLSSegmenter?

    init(segmenter: HLSSegmenter, port: in_port_t = 8080) {
        self.segmenter = segmenter
        self.port = port
        // Basic player page using hls.js (works cross-browser)
        server["/"] = { _ in
            let html = """
            <!doctype html><html><body style="margin:0;background:#111;color:#eee;">
            <div style="padding:12px;font-family:-apple-system,system-ui">HLS Screen Stream</div>
            <video id="v" autoplay muted playsinline controls style="width:100vw;height:calc(100vh - 48px);"></video>
            <script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
            <script>
            const v = document.getElementById('v');
            const src = '/stream.m3u8';
            if (v.canPlayType('application/vnd.apple.mpegurl')) { v.src = src; }
            else if (window.Hls && Hls.isSupported()) { const hls = new Hls(); hls.loadSource(src); hls.attachMedia(v); }
            else { document.body.insertAdjacentHTML('beforeend', '<p>HLS not supported.</p>'); }
            </script>
            </body></html>
            """
            return .ok(.html(html))
        }
        // Playlist
        server["/stream.m3u8"] = { [weak self] _ in
            guard let m3u8 = self?.segmenter?.playlist.data(using: .utf8) else { return .internalServerError }
            return .raw(200, "OK", ["Content-Type": "application/x-mpegURL"]) { writer in
                try? writer.write(m3u8)
            }
        }
        // Init segment (fMP4 header)
        server["/init.mp4"] = { [weak self] _ in
            guard let data = self?.segmenter?.initSegment() else { return .notFound }
            return .raw(200, "OK", ["Content-Type": "video/mp4"]) { writer in
                try? writer.write(data)
            }
        }
        // Segment files: /files/sequence{N}.m4s
        // Uses [weak self] + the segmenter's lock-protected accessors; the
        // original captured the init parameter strongly and read the backing
        // storage without synchronization.
        server["/files/:file"] = { [weak self] req in
            let name = req.params[":file"] ?? ""
            // init.mp4 may also be requested under /files/ depending on how
            // the player resolves the EXT-X-MAP URI.
            if name == "init.mp4", let data = self?.segmenter?.initSegment() {
                return HttpResponse.raw(200, "OK", ["Content-Type": "video/mp4"]) { try $0.write(data) }
            }
            if name.hasPrefix("sequence"), name.hasSuffix(".m4s") {
                let numStr = name.dropFirst("sequence".count).dropLast(".m4s".count)
                if let num = Int(numStr),
                   let data = self?.segmenter?.segment(sequence: num) {
                    return HttpResponse.raw(200, "OK", ["Content-Type": "video/mp4"]) { try $0.write(data) }
                }
            }
            return HttpResponse.notFound
        }
    }

    /// Starts listening on `port` (IPv4 so http://localhost:PORT works everywhere).
    func start() throws { try server.start(port, forceIPv4: true) }

    /// Stops accepting connections.
    func stop() { server.stop() }
}
// MARK: - App
@main
struct HLS_ScreenStreamerApp: App {
    @StateObject private var vm = AppViewModel()

    var body: some Scene {
        WindowGroup {
            content
                .frame(width: 420, height: 220)
                .padding()
        }
    }

    /// Main window: title, status line, and the start/stop controls.
    private var content: some View {
        VStack(spacing: 16) {
            Text("HLS Screen Streamer")
                .font(.title2).padding(.top, 12)
            Text(vm.status).font(.caption)
            controls
            Spacer()
        }
    }

    /// Start/Stop button, plus the stream URL while streaming is active.
    private var controls: some View {
        HStack {
            Button(vm.isRunning ? "Stop" : "Start") { vm.toggle() }
                .keyboardShortcut(.space)
                .padding()
                .background(Color.accentColor.opacity(0.15))
                .cornerRadius(10)
            if vm.isRunning {
                Text("Open: http://localhost:\(vm.port)/")
                    .font(.footnote).textSelection(.enabled)
            }
        }
    }
}
/// Owns the capture → segmenter → web-server pipeline and publishes UI state.
final class AppViewModel: ObservableObject {
    @Published var isRunning = false
    @Published var status = "Idle"
    let port: Int32 = 8080

    private var capture: ScreenCapture?
    private var segmenter: HLSSegmenter?
    private var server: HLSWebServer?

    /// Starts when idle, stops when streaming.
    func toggle() {
        isRunning ? stop() : start()
    }

    /// Wires up segmenter → capture → web server. On any failure, tears down
    /// whatever was already started and reports the error in `status`.
    func start() {
        guard !isRunning else { return }
        // Choose a working resolution; using main display size can be large—downscale if needed.
        // NOTE(review): NSScreen reports points, not backing pixels — on Retina
        // displays the captured frames may be larger than this; confirm the
        // desired capture resolution.
        let screenSize = NSScreen.main?.frame.size ?? .init(width: 1280, height: 720)
        // H.264 rejects odd dimensions; round down to the nearest even size.
        let w = max(2, Int(screenSize.width) & ~1)
        let h = max(2, Int(screenSize.height) & ~1)
        let seg = HLSSegmenter(width: w, height: h, fps: 30)
        let cap = ScreenCapture()
        do {
            try seg.start(at: CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 1000))
        } catch {
            status = "Failed to start writer: \(error)"
            return
        }
        cap.onSample = { [weak seg] sample in
            seg?.append(sample)
        }
        do {
            try cap.start()
        } catch {
            status = "Capture error: \(error)"
            // Don't leak a running writer when capture fails to start.
            seg.stop({})
            return
        }
        let web = HLSWebServer(segmenter: seg, port: in_port_t(port))
        do { try web.start() } catch {
            status = "HTTP server error: \(error)"
            cap.stop()
            seg.stop({})
            return
        }
        capture = cap
        segmenter = seg
        server = web
        isRunning = true
        status = "Streaming… Visit http://localhost:\(port)/"
    }

    /// Stops capture, flushes the writer, and shuts the server down.
    func stop() {
        isRunning = false
        capture?.stop()
        segmenter?.stop { }
        server?.stop()
        status = "Stopped"
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment