Native streaming for Electron, Tauri, and platform-native applications
Compare desktop application frameworks for your use case
Get started with your chosen framework

npm install @wave/electron-sdk electron-builder

# Or with yarn
yarn add @wave/electron-sdk electron-builder
import path from 'path';
import { app, BrowserWindow, desktopCapturer, ipcMain } from 'electron';
import { WaveElectron, CaptureSource, StreamConfig } from '@wave/electron-sdk';
// Initialize WAVE SDK
const wave = new WaveElectron({
apiKey: process.env.WAVE_API_KEY!,
// Enable hardware acceleration if available
hardwareAcceleration: {
preferredEncoder: 'auto', // 'nvenc' | 'qsv' | 'vce' | 'auto'
fallbackToSoftware: true,
},
});
let mainWindow: BrowserWindow;
let currentStream: Awaited<ReturnType<typeof wave.streams.create>> | null = null;
app.whenReady().then(async () => {
mainWindow = new BrowserWindow({
width: 1200,
height: 800,
webPreferences: {
nodeIntegration: false,
contextIsolation: true,
preload: path.join(__dirname, 'preload.js'),
},
});
// Register IPC handlers
setupIpcHandlers();
mainWindow.loadFile('index.html');
});
function setupIpcHandlers() {
// Get available capture sources
ipcMain.handle('get-sources', async () => {
const sources = await desktopCapturer.getSources({
types: ['screen', 'window'],
thumbnailSize: { width: 320, height: 180 },
});
return sources.map(source => ({
id: source.id,
name: source.name,
thumbnail: source.thumbnail.toDataURL(),
appIcon: source.appIcon?.toDataURL(),
}));
});
// Start streaming
ipcMain.handle('start-stream', async (_, config: StreamConfig) => {
try {
currentStream = await wave.streams.create({
title: config.title || 'Desktop Stream',
protocol: 'webrtc',
source: {
type: config.sourceType,
id: config.sourceId,
captureOptions: {
cursor: 'always',
resolution: config.resolution || '1080p',
frameRate: config.frameRate || 60,
},
},
audio: {
system: {
enabled: config.captureSystemAudio,
device: 'default',
},
microphone: {
enabled: config.captureMicrophone,
device: config.microphoneDevice || 'default',
noiseSuppression: true,
echoCancellation: true,
},
},
encoding: {
codec: 'h264',
bitrate: config.bitrate || 6000,
keyframeInterval: 2,
},
});
return {
success: true,
streamUrl: currentStream.url,
streamKey: currentStream.key,
};
} catch (error) {
console.error('Stream creation failed:', error);
return { success: false, error: error instanceof Error ? error.message : String(error) };
}
});
// Stop streaming
ipcMain.handle('stop-stream', async () => {
if (currentStream) {
await wave.streams.stop(currentStream.id);
currentStream = null;
}
return { success: true };
});
// Get stream stats
ipcMain.handle('get-stream-stats', async () => {
if (!currentStream) return null;
return await wave.streams.getStats(currentStream.id);
});
}
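// The preload script below listens for 'stream-event' messages from the main
// process, but nothing in this example sends them. A sketch of one way to wire
// that up, assuming the SDK exposes EventEmitter-style events (wave.on is an
// assumption, not a documented API); call it alongside setupIpcHandlers():
function forwardStreamEvents() {
  wave.on('stream-event', (event: unknown) => {
    mainWindow.webContents.send('stream-event', event);
  });
}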
// Handle app lifecycle
app.on('window-all-closed', async () => {
if (currentStream) {
await wave.streams.stop(currentStream.id);
}
if (process.platform !== 'darwin') app.quit();
});

// preload.ts
import { contextBridge, ipcRenderer } from 'electron';
// StreamEvent is assumed to be exported by the SDK alongside StreamConfig
import type { StreamConfig, StreamEvent } from '@wave/electron-sdk';
contextBridge.exposeInMainWorld('waveDesktop', {
getSources: () => ipcRenderer.invoke('get-sources'),
startStream: (config: StreamConfig) => ipcRenderer.invoke('start-stream', config),
stopStream: () => ipcRenderer.invoke('stop-stream'),
getStreamStats: () => ipcRenderer.invoke('get-stream-stats'),
// Listen for stream events
onStreamEvent: (callback: (event: StreamEvent) => void) => {
ipcRenderer.on('stream-event', (_, event) => callback(event));
},
});

// renderer: StreamController.tsx
import { useState, useEffect } from 'react';
interface CaptureSource {
id: string;
name: string;
thumbnail: string;
}
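// The component below also uses StreamStats and window.waveDesktop, which this file
// never declares. A minimal declaration sketch, with field names and shapes inferred
// from how they are used further down (treat them as assumptions):
interface StreamStats {
  bitrate: number;       // bits per second; converted to Kbps in the stats panel below
  frameRate: number;
  width: number;
  height: number;
  droppedFrames: number;
}

declare global {
  interface Window {
    waveDesktop: {
      getSources(): Promise<CaptureSource[]>;
      startStream(config: Record<string, unknown>): Promise<{
        success: boolean;
        streamUrl?: string;
        error?: string;
      }>;
      stopStream(): Promise<{ success: boolean }>;
      getStreamStats(): Promise<StreamStats | null>;
      onStreamEvent(callback: (event: { type: string }) => void): void;
    };
  }
}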
export function StreamController() {
const [sources, setSources] = useState<CaptureSource[]>([]);
const [selectedSource, setSelectedSource] = useState<string | null>(null);
const [isStreaming, setIsStreaming] = useState(false);
const [streamUrl, setStreamUrl] = useState<string | null>(null);
const [stats, setStats] = useState<StreamStats | null>(null);
useEffect(() => {
loadSources();
}, []);
useEffect(() => {
if (!isStreaming) return;
const interval = setInterval(async () => {
const stats = await window.waveDesktop.getStreamStats();
setStats(stats);
}, 1000);
return () => clearInterval(interval);
}, [isStreaming]);
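// Sketch: react to events pushed from the main process over the preload bridge.
// The event shape ({ type: 'disconnected' }) is an assumption about the WAVE SDK.
useEffect(() => {
  window.waveDesktop.onStreamEvent((event) => {
    if (event.type === 'disconnected') {
      setIsStreaming(false);
    }
  });
  // The preload bridge exposes no way to remove this listener, so the
  // subscription lives for the lifetime of the window.
}, []);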
async function loadSources() {
const sources = await window.waveDesktop.getSources();
setSources(sources);
}
async function startStream() {
if (!selectedSource) return;
const result = await window.waveDesktop.startStream({
sourceId: selectedSource,
sourceType: 'screen',
captureSystemAudio: true,
captureMicrophone: true,
resolution: '1080p',
frameRate: 60,
bitrate: 6000,
});
if (result.success) {
setIsStreaming(true);
setStreamUrl(result.streamUrl);
} else {
alert('Failed to start stream: ' + result.error);
}
}
async function stopStream() {
await window.waveDesktop.stopStream();
setIsStreaming(false);
setStreamUrl(null);
setStats(null);
}
return (
<div className="p-6">
<h2 className="text-xl font-bold mb-4">WAVE Desktop Streamer</h2>
{!isStreaming ? (
<div>
<h3 className="font-semibold mb-2">Select Source</h3>
<div className="grid grid-cols-3 gap-4">
{sources.map(source => (
<button
key={source.id}
onClick={() => setSelectedSource(source.id)}
className={`p-2 border rounded ${
selectedSource === source.id ? 'border-primary-500' : 'border-border-secondary'
}`}
>
<img src={source.thumbnail} alt={source.name} className="w-full" />
<p className="text-sm mt-1 truncate">{source.name}</p>
</button>
))}
</div>
<button
onClick={startStream}
disabled={!selectedSource}
className="mt-4 px-4 py-2 bg-primary-600 text-white rounded disabled:opacity-50"
>
Start Streaming
</button>
</div>
) : (
<div>
<div className="bg-success-100 p-4 rounded mb-4">
<p className="font-semibold text-success-800">🔴 Live</p>
<p className="text-sm text-success-700">{streamUrl}</p>
</div>
{stats && (
<div className="grid grid-cols-4 gap-4 mb-4">
<div className="bg-surface-tertiary p-3 rounded">
<p className="text-xs text-text-muted">Bitrate</p>
<p className="font-semibold">{(stats.bitrate / 1000).toFixed(0)} Kbps</p>
</div>
<div className="bg-surface-tertiary p-3 rounded">
<p className="text-xs text-text-muted">FPS</p>
<p className="font-semibold">{stats.frameRate}</p>
</div>
<div className="bg-surface-tertiary p-3 rounded">
<p className="text-xs text-text-muted">Resolution</p>
<p className="font-semibold">{stats.width}x{stats.height}</p>
</div>
<div className="bg-surface-tertiary p-3 rounded">
<p className="text-xs text-text-muted">Dropped Frames</p>
<p className="font-semibold">{stats.droppedFrames}</p>
</div>
</div>
)}
<button
onClick={stopStream}
className="px-4 py-2 bg-destructive-600 text-white rounded"
>
Stop Streaming
</button>
</div>
)}
</div>
);
}

Reduce CPU usage by 80-95% with GPU encoding
| Encoder | Platforms | Quality | CPU Savings | Formats |
|---|---|---|---|---|
| NVIDIA NVENC | Windows, Linux | Excellent | 90%+ | H.264, HEVC, AV1 |
| AMD VCE/VCN | Windows, Linux | Very Good | 85%+ | H.264, HEVC |
| Intel QuickSync | Windows, macOS, Linux | Good | 80%+ | H.264, HEVC, AV1 |
| Apple VideoToolbox | macOS, iOS | Excellent | 95%+ | H.264, HEVC, ProRes |
const stream = await wave.streams.create({
// ... other config
encoding: {
codec: 'h264',
bitrate: 6000,
// Auto-detect: NVENC → QuickSync → VCE → Software
hardwareAcceleration: 'auto',
// Or specify explicitly
// hardwareAcceleration: 'nvenc',
// Quality presets
preset: 'quality', // 'quality' | 'balanced' | 'speed'
profile: 'high',
level: '4.1',
}
});
// Check which encoder was selected
const stats = await wave.streams.getStats(stream.id);
console.log('Using encoder:', stats.encoder); // 'NVENC' | 'QuickSync' | etc.

Capture game audio, music, and application sounds
- Windows: WASAPI loopback, works out of the box
- macOS: CoreAudio with ScreenCaptureKit audio capture
- Linux: PipeWire or PulseAudio monitor sources
// Advanced audio configuration
const stream = await wave.streams.create({
// ... source config
audio: {
// System audio (games, music, etc.)
system: {
enabled: true,
device: 'default', // or enumerate specific devices
volume: 1.0,
// Per-application capture (Windows 10 1903+)
processFilter: {
mode: 'include', // 'include' | 'exclude'
processes: ['game.exe', 'spotify.exe'],
},
},
// Microphone
microphone: {
enabled: true,
device: 'default',
volume: 0.8,
// Audio processing
noiseSuppression: true,
echoCancellation: true,
autoGainControl: true,
// Voice activity detection
vadEnabled: true,
vadThreshold: -40, // dB
},
// Audio mixing
mix: {
mode: 'balanced',
systemGain: 0.7,
micGain: 1.0,
// Ducking: reduce system volume when speaking
ducking: {
enabled: true,
reduction: 0.5, // 50% reduction
attackTime: 100, // ms
releaseTime: 500, // ms
},
},
// Output format
output: {
sampleRate: 48000,
channels: 2, // stereo
bitDepth: 16,
},
},
});
// Get available audio devices
const devices = await wave.audio.getDevices();
console.log('Microphones:', devices.microphones);
console.log('System audio devices:', devices.systemAudio);

Permissions, challenges, and solutions for each OS
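On macOS (10.15 and later), screen capture requires the Screen Recording permission; without it, `desktopCapturer` silently returns blank sources. The sketch below uses Electron's `systemPreferences` and `shell` APIs to check the status and point the user at the right settings pane; the deep-link URL is macOS-specific and may change between OS versions.

```typescript
import { systemPreferences, shell } from 'electron';

// Returns true if screen capture is allowed; otherwise opens the macOS
// Screen Recording settings pane (there is no programmatic prompt for it).
async function ensureScreenCapturePermission(): Promise<boolean> {
  if (process.platform !== 'darwin') return true;

  const status = systemPreferences.getMediaAccessStatus('screen');
  if (status === 'granted') return true;

  await shell.openExternal(
    'x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture'
  );
  return false;
}

// Microphone access, by contrast, can be requested directly on macOS.
async function ensureMicrophonePermission(): Promise<boolean> {
  if (process.platform !== 'darwin') return true;
  return systemPreferences.askForMediaAccess('microphone');
}
```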
Distribute your app through stores or direct download
| Channel | Platform | Requirements | Updates |
|---|---|---|---|
| Windows Store | Windows | MSIX package, code signing, Store certification | Auto-update via Store |
| Mac App Store | macOS | Notarization, sandboxing, App Review | Auto-update via Store |
| Direct Download | All | Code signing, hosting, update server | electron-updater / Sparkle |
| Snap Store | Linux | snapcraft.yaml, snap confinement | Auto-update via Snap |
| Flathub | Linux | Flatpak manifest, Flathub review | Auto-update via Flatpak |
// package.json
{
"build": {
"appId": "com.yourcompany.wavestreamer",
"productName": "WAVE Streamer",
"publish": {
"provider": "github",
"releaseType": "release"
},
"mac": {
"category": "public.app-category.video",
"hardenedRuntime": true,
"gatekeeperAssess": false,
"entitlements": "build/entitlements.mac.plist",
"entitlementsInherit": "build/entitlements.mac.plist",
"target": [
{ "target": "dmg", "arch": ["x64", "arm64"] },
{ "target": "zip", "arch": ["x64", "arm64"] }
]
},
"win": {
"target": ["nsis", "portable"],
"certificateSubjectName": "Your Company Name",
"signDlls": true
},
"linux": {
"target": ["AppImage", "deb", "snap"],
"category": "Video"
},
"nsis": {
"oneClick": false,
"perMachine": true,
"allowToChangeInstallationDirectory": true
}
}
}
// Build commands
// npm run electron:build # Build for current platform
// npm run electron:build:all # Build for all platforms

Hotkeys, system tray, notifications, and more
- Global hotkeys: control streaming from any application (Ctrl+Shift+S: start/stop, Ctrl+Shift+M: mute)
- System tray: minimize to tray with status indicators
- Notifications: native OS notifications for events such as "Stream started" and "Connection lost"
- Auto-updates: seamless background updates with delta updates and rollback support

import { globalShortcut, app } from 'electron';
app.whenReady().then(() => {
// Start/Stop streaming
globalShortcut.register('CommandOrControl+Shift+S', async () => {
if (isStreaming) {
await stopStream();
showNotification('Stream Stopped', 'Your stream has ended');
} else {
await startStream();
showNotification('Stream Started', 'You are now live!');
}
});
// Toggle mute
globalShortcut.register('CommandOrControl+Shift+M', () => {
toggleMicrophone();
updateTrayIcon();
});
// Quick scene switch
globalShortcut.register('CommandOrControl+Shift+1', () => switchScene(0));
globalShortcut.register('CommandOrControl+Shift+2', () => switchScene(1));
globalShortcut.register('CommandOrControl+Shift+3', () => switchScene(2));
});
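// updateTrayIcon() and showNotification() are referenced above but not defined in
// this snippet. A sketch of the tray side using Electron's Tray API, assuming Tray
// and Menu are imported from 'electron', path from 'node:path', and that isStreaming
// mirrors the streaming state used above; call createTray() from app.whenReady():
let tray: Tray | null = null;

function createTray() {
  tray = new Tray(path.join(__dirname, 'assets/tray-idle.png'));
  tray.setToolTip('WAVE Streamer');
  tray.setContextMenu(
    Menu.buildFromTemplate([
      { label: 'Quit', role: 'quit' },
    ])
  );
}

function updateTrayIcon() {
  // Swap the icon to reflect live vs. idle status.
  const icon = isStreaming ? 'assets/tray-live.png' : 'assets/tray-idle.png';
  tray?.setImage(path.join(__dirname, icon));
}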
app.on('will-quit', () => {
globalShortcut.unregisterAll();
});

Common issues and solutions