From ce9b884948c43c2339d6bb699a5420afe56e278f Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Wed, 6 May 2026 14:06:49 -0400
Subject: [PATCH 01/29] Improve Android WebRTC input and encoding performance
---
README.md | 16 +-
cli/native/XCWNativeBridge.h | 5 +
cli/native/XCWNativeBridge.m | 234 +++
client/src/api/types.ts | 7 +
client/src/app/AppShell.tsx | 6 +-
.../features/simulators/simulatorDisplay.ts | 3 +
client/src/features/viewport/DeviceChrome.tsx | 51 +-
.../features/viewport/SimulatorViewport.tsx | 3 +
client/src/styles/components.css | 48 +
docs/api/rest.md | 65 +-
docs/cli/commands.md | 15 +-
docs/extensions/browser-client.md | 6 +-
docs/guide/architecture.md | 18 +-
docs/guide/installation.md | 1 +
server/Cargo.lock | 348 ++++-
server/Cargo.toml | 3 +
server/src/android.rs | 1355 +++++++++++++++++
server/src/api/routes.rs | 692 ++++++++-
server/src/main.rs | 193 ++-
server/src/native/ffi.rs | 17 +
server/src/transport/webrtc.rs | 662 +++++++-
skills/simdeck/SKILL.md | 11 +-
22 files changed, 3600 insertions(+), 159 deletions(-)
create mode 100644 server/src/android.rs
diff --git a/README.md b/README.md
index ddf7fde4..e638cfa7 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
SimDeck is a developer tool built for streamlining mobile app development for coding agents.
- Drive Simulator from the CLI using agents, browser, and automated tests on macOS.
+ Drive iOS Simulators and Android emulators from the CLI using agents, browser, and automated tests on macOS.
@@ -35,8 +35,9 @@ view inside the editor.
## Features
-- Local simulator video stream over browser-native WebRTC H.264 with H.264 WebSocket fallback
-- Full simulator control & inspection using private accessibility APIs - available using `simdeck` CLI
+- Local iOS Simulator and Android emulator video over browser-native WebRTC H.264 with H.264 WebSocket fallback
+- Android emulator frames are sourced from emulator gRPC and encoded through macOS VideoToolbox
+- Full simulator control & inspection using private iOS accessibility APIs and Android UIAutomator - available using `simdeck` CLI
- Real-time screen `describe` command using accessibility view tree - available in token-efficient format for agents
- CoreSimulator chrome asset rendering for device bezels
- NativeScript, React Native, Flutter, UIKit and SwiftUI runtime inspector plugins to view app's view hierarchy live
@@ -138,6 +139,7 @@ simdeck boot
simdeck shutdown
simdeck erase
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck uninstall com.example.App
simdeck open-url https://example.com
simdeck launch com.apple.Preferences
@@ -179,6 +181,14 @@ simdeck logs --seconds 30 --limit 200
without launching Simulator.app, then falls back to `xcrun simctl` when private
booting is unavailable.
+Android emulators appear in `simdeck list` with IDs like
+`android:SimDeck_Pixel_8_API_36`. For Android IDs, lifecycle, install, launch,
+URL, screenshot, logs, UIAutomator `describe`, tap, swipe, text, key, home, app
+switcher, rotation, pasteboard, and browser live view route through the Android
+SDK tools (`emulator` and `adb`) plus the emulator gRPC screenshot stream for
+live video. `simdeck stream` remains iOS-only because it writes the iOS H.264
+transport stream.
+
`stream` writes an Annex B H.264 elementary stream to stdout for diagnostics or
external tools such as `ffplay`.
diff --git a/cli/native/XCWNativeBridge.h b/cli/native/XCWNativeBridge.h
index a7d81d61..ff4ac7f1 100644
--- a/cli/native/XCWNativeBridge.h
+++ b/cli/native/XCWNativeBridge.h
@@ -89,6 +89,11 @@ bool xcw_native_session_rotate_right(void * _Nonnull handle, char * _Nullable *
bool xcw_native_session_rotate_left(void * _Nonnull handle, char * _Nullable * _Nullable error_message);
void xcw_native_session_set_frame_callback(void * _Nonnull handle, xcw_native_frame_callback _Nullable callback, void * _Nullable user_data);
+void * _Nullable xcw_native_h264_encoder_create(xcw_native_frame_callback _Nullable callback, void * _Nullable user_data, char * _Nullable * _Nullable error_message);
+void xcw_native_h264_encoder_destroy(void * _Nullable handle);
+bool xcw_native_h264_encoder_encode_rgba(void * _Nonnull handle, const uint8_t * _Nonnull rgba, size_t length, uint32_t width, uint32_t height, uint64_t timestamp_us, char * _Nullable * _Nullable error_message);
+void xcw_native_h264_encoder_request_keyframe(void * _Nonnull handle);
+
void xcw_native_free_string(char * _Nullable value);
void xcw_native_free_bytes(xcw_native_owned_bytes bytes);
void xcw_native_release_shared_bytes(xcw_native_shared_bytes bytes);
diff --git a/cli/native/XCWNativeBridge.m b/cli/native/XCWNativeBridge.m
index 3fc2376a..e93241e7 100644
--- a/cli/native/XCWNativeBridge.m
+++ b/cli/native/XCWNativeBridge.m
@@ -3,11 +3,13 @@
#import "DFPrivateSimulatorDisplayBridge.h"
#import "XCWAccessibilityBridge.h"
#import "XCWChromeRenderer.h"
+#import "XCWH264Encoder.h"
#import "XCWNativeSession.h"
#import "XCWSimctl.h"
#import
#import
+#import <CoreVideo/CoreVideo.h>
#include
#include
@@ -63,10 +65,190 @@ static xcw_native_owned_bytes XCWOwnedBytesFromData(NSData *data) {
return bytes;
}
+static xcw_native_shared_bytes XCWSharedBytesFromData(NSData *data) {
+ if (data.length == 0) {
+ return (xcw_native_shared_bytes){0};
+ }
+
+ CFTypeRef owner = CFRetain((__bridge CFTypeRef)data);
+ return (xcw_native_shared_bytes){
+ .data = data.bytes,
+ .length = data.length,
+ .owner = (const void *)owner,
+ };
+}
+
static XCWNativeSession *XCWNativeSessionFromHandle(void *handle) {
return (__bridge XCWNativeSession *)handle;
}
+@interface XCWNativeH264Encoder : NSObject
+
+- (instancetype)initWithFrameCallback:(xcw_native_frame_callback)callback
+ userData:(void *)userData;
+- (BOOL)encodeRGBA:(const uint8_t *)rgba
+ length:(size_t)length
+ width:(uint32_t)width
+ height:(uint32_t)height
+ error:(NSError * _Nullable __autoreleasing *)error;
+- (void)requestKeyFrame;
+- (void)invalidate;
+
+@end
+
+@implementation XCWNativeH264Encoder {
+ XCWH264Encoder *_encoder;
+ xcw_native_frame_callback _callback;
+ void *_callbackUserData;
+ uint64_t _frameSequence;
+}
+
+- (instancetype)initWithFrameCallback:(xcw_native_frame_callback)callback
+ userData:(void *)userData {
+ self = [super init];
+ if (self == nil) {
+ return nil;
+ }
+
+ _callback = callback;
+ _callbackUserData = userData;
+ __weak typeof(self) weakSelf = self;
+ @synchronized (XCWNativeH264Encoder.class) {
+ const char *previousCodec = getenv("SIMDECK_VIDEO_CODEC");
+ char *previousCodecCopy = previousCodec != NULL ? strdup(previousCodec) : NULL;
+ const char *androidCodec = getenv("SIMDECK_ANDROID_VIDEO_CODEC");
+ if (androidCodec == NULL || strlen(androidCodec) == 0) {
+ androidCodec = "software";
+ }
+ setenv("SIMDECK_VIDEO_CODEC", androidCodec, 1);
+ _encoder = [[XCWH264Encoder alloc] initWithOutputHandler:^(NSData *sampleData,
+ uint64_t timestampUs,
+ BOOL isKeyFrame,
+ NSString * _Nullable codec,
+ NSData * _Nullable decoderConfig,
+ CGSize dimensions) {
+ __strong typeof(weakSelf) strongSelf = weakSelf;
+ if (strongSelf == nil || strongSelf->_callback == NULL || sampleData.length == 0) {
+ return;
+ }
+ strongSelf->_frameSequence += 1;
+ xcw_native_frame frame = {
+ .frame_sequence = strongSelf->_frameSequence,
+ .timestamp_us = timestampUs,
+ .is_keyframe = isKeyFrame,
+ .width = (uint32_t)llround(dimensions.width),
+ .height = (uint32_t)llround(dimensions.height),
+ .codec = codec.UTF8String,
+ .description = XCWSharedBytesFromData(decoderConfig),
+ .data = XCWSharedBytesFromData(sampleData),
+ };
+ strongSelf->_callback(&frame, strongSelf->_callbackUserData);
+ }];
+ if (previousCodecCopy != NULL) {
+ setenv("SIMDECK_VIDEO_CODEC", previousCodecCopy, 1);
+ free(previousCodecCopy);
+ } else {
+ unsetenv("SIMDECK_VIDEO_CODEC");
+ }
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)encodeRGBA:(const uint8_t *)rgba
+ length:(size_t)length
+ width:(uint32_t)width
+ height:(uint32_t)height
+ error:(NSError * _Nullable __autoreleasing *)error {
+ if (rgba == NULL || width == 0 || height == 0) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:1
+ userInfo:@{ NSLocalizedDescriptionKey: @"RGBA frame input was empty." }];
+ }
+ return NO;
+ }
+ size_t expectedLength = (size_t)width * (size_t)height * 4;
+ if (length < expectedLength) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:2
+ userInfo:@{ NSLocalizedDescriptionKey: @"RGBA frame input was truncated." }];
+ }
+ return NO;
+ }
+
+ NSDictionary *attributes = @{
+ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+ (__bridge NSString *)kCVPixelBufferWidthKey: @(width),
+ (__bridge NSString *)kCVPixelBufferHeightKey: @(height),
+ (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{},
+ };
+ CVPixelBufferRef pixelBuffer = NULL;
+ CVReturn createStatus = CVPixelBufferCreate(kCFAllocatorDefault,
+ (size_t)width,
+ (size_t)height,
+ kCVPixelFormatType_32BGRA,
+ (__bridge CFDictionaryRef)attributes,
+ &pixelBuffer);
+ if (createStatus != kCVReturnSuccess || pixelBuffer == NULL) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:createStatus
+ userInfo:@{ NSLocalizedDescriptionKey: @"Unable to allocate a VideoToolbox pixel buffer." }];
+ }
+ return NO;
+ }
+
+ CVReturn lockStatus = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ if (lockStatus != kCVReturnSuccess) {
+ CVPixelBufferRelease(pixelBuffer);
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:lockStatus
+ userInfo:@{ NSLocalizedDescriptionKey: @"Unable to lock a VideoToolbox pixel buffer." }];
+ }
+ return NO;
+ }
+
+ uint8_t *dst = CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t dstRowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);
+ size_t srcRowBytes = (size_t)width * 4;
+ for (uint32_t y = 0; y < height; y += 1) {
+ const uint8_t *srcRow = rgba + ((size_t)y * srcRowBytes);
+ uint8_t *dstRow = dst + ((size_t)y * dstRowBytes);
+ for (uint32_t x = 0; x < width; x += 1) {
+ const uint8_t *src = srcRow + ((size_t)x * 4);
+ uint8_t *pixel = dstRow + ((size_t)x * 4);
+ pixel[0] = src[2];
+ pixel[1] = src[1];
+ pixel[2] = src[0];
+ pixel[3] = src[3];
+ }
+ }
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ [_encoder encodePixelBuffer:pixelBuffer];
+ CVPixelBufferRelease(pixelBuffer);
+ return YES;
+}
+
+- (void)requestKeyFrame {
+ [_encoder requestKeyFrame];
+}
+
+- (void)invalidate {
+ [_encoder invalidate];
+}
+
+@end
+
+static XCWNativeH264Encoder *XCWNativeH264EncoderFromHandle(void *handle) {
+ return (__bridge XCWNativeH264Encoder *)handle;
+}
+
static BOOL XCWPerformSimctlAction(char **errorMessage, BOOL (^action)(XCWSimctl *simctl, NSError **error)) {
XCWSimctl *simctl = [[XCWSimctl alloc] init];
NSError *error = nil;
@@ -889,6 +1071,58 @@ void xcw_native_session_set_frame_callback(void *handle, xcw_native_frame_callba
}
}
+void *xcw_native_h264_encoder_create(xcw_native_frame_callback callback, void *user_data, char **error_message) {
+ @autoreleasepool {
+ XCWNativeH264Encoder *encoder = [[XCWNativeH264Encoder alloc] initWithFrameCallback:callback
+ userData:user_data];
+ if (encoder == nil) {
+ if (error_message != NULL) {
+ *error_message = XCWCopyCString(@"Unable to create the native H.264 encoder.");
+ }
+ return NULL;
+ }
+ return (__bridge_retained void *)encoder;
+ }
+}
+
+void xcw_native_h264_encoder_destroy(void *handle) {
+ if (handle == NULL) {
+ return;
+ }
+ @autoreleasepool {
+ XCWNativeH264Encoder *encoder = CFBridgingRelease(handle);
+ [encoder invalidate];
+ }
+}
+
+bool xcw_native_h264_encoder_encode_rgba(void *handle,
+ const uint8_t *rgba,
+ size_t length,
+ uint32_t width,
+ uint32_t height,
+ uint64_t timestamp_us,
+ char **error_message) {
+ (void)timestamp_us;
+ @autoreleasepool {
+ NSError *error = nil;
+ BOOL ok = [XCWNativeH264EncoderFromHandle(handle) encodeRGBA:rgba
+ length:length
+ width:width
+ height:height
+ error:&error];
+ if (!ok) {
+ XCWSetErrorMessage(error_message, error);
+ }
+ return ok;
+ }
+}
+
+void xcw_native_h264_encoder_request_keyframe(void *handle) {
+ @autoreleasepool {
+ [XCWNativeH264EncoderFromHandle(handle) requestKeyFrame];
+ }
+}
+
void xcw_native_free_string(char *value) {
if (value != NULL) {
free(value);
diff --git a/client/src/api/types.ts b/client/src/api/types.ts
index 0f4b3c9d..79d6b5dc 100644
--- a/client/src/api/types.ts
+++ b/client/src/api/types.ts
@@ -28,11 +28,17 @@ export interface PrivateDisplayInfo {
export interface SimulatorMetadata {
udid: string;
name: string;
+ platform?: "ios-simulator" | "android-emulator" | string;
runtimeName?: string;
runtimeIdentifier?: string;
deviceTypeName?: string;
deviceTypeIdentifier?: string;
isBooted: boolean;
+ android?: {
+ avdName?: string;
+ grpcPort?: number;
+ serial?: string;
+ };
privateDisplay?: PrivateDisplayInfo;
}
@@ -112,6 +118,7 @@ export interface ChromeProfile {
screenWidth: number;
screenHeight: number;
cornerRadius: number;
+ chromeStyle?: "asset" | "css-android" | string;
hasScreenMask?: boolean;
buttons?: ChromeButtonProfile[];
}
diff --git a/client/src/app/AppShell.tsx b/client/src/app/AppShell.tsx
index f984108f..f634dc56 100644
--- a/client/src/app/AppShell.tsx
+++ b/client/src/app/AppShell.tsx
@@ -745,9 +745,12 @@ export function AppShell({
: "",
[selectedSimulator?.udid, streamStamp],
);
+ const chromeUsesAsset = Boolean(
+ viewportChromeProfile && viewportChromeProfile.chromeStyle !== "css-android",
+ );
const chromeRequired = Boolean(
(shouldRenderChrome && !chromeProfileReady) ||
- (viewportChromeProfile && chromeUrl),
+ (chromeUsesAsset && chromeUrl),
);
const simulatorRotationQuarterTurns =
normalizeSimulatorRotationQuarterTurns(selectedSimulator);
@@ -1987,6 +1990,7 @@ export function AppShell({
chromeProfile={viewportChromeProfile}
chromeRequired={chromeRequired}
chromeScreenStyle={viewportScreenStyle}
+ chromeStyle={viewportChromeProfile?.chromeStyle}
chromeUrl={chromeUrl}
chromeButtonUrl={chromeButtonUrl}
debugPanel={
diff --git a/client/src/features/simulators/simulatorDisplay.ts b/client/src/features/simulators/simulatorDisplay.ts
index 0837ef20..dc2cd995 100644
--- a/client/src/features/simulators/simulatorDisplay.ts
+++ b/client/src/features/simulators/simulatorDisplay.ts
@@ -14,6 +14,9 @@ export function simulatorRuntimeLabel(simulator: SimulatorMetadata): string {
export function shouldRenderNativeChrome(
simulator: SimulatorMetadata,
): boolean {
+ if (simulator.platform === "android-emulator") {
+ return true;
+ }
const identifier = simulator.deviceTypeIdentifier ?? "";
const name = simulator.name ?? "";
const deviceTypeName = simulator.deviceTypeName ?? "";
diff --git a/client/src/features/viewport/DeviceChrome.tsx b/client/src/features/viewport/DeviceChrome.tsx
index dfa0bb8c..2ce43f4c 100644
--- a/client/src/features/viewport/DeviceChrome.tsx
+++ b/client/src/features/viewport/DeviceChrome.tsx
@@ -17,6 +17,7 @@ interface DeviceChromeProps {
accessibilitySelectedId: string;
chromeProfile: ChromeProfile | null;
chromeScreenStyle: CSSProperties | null;
+ chromeStyle?: string;
chromeUrl: string;
chromeButtonUrl: (button: string, pressed?: boolean) => string;
hasFrame: boolean;
@@ -61,6 +62,7 @@ export function DeviceChrome({
accessibilitySelectedId,
chromeProfile,
chromeScreenStyle,
+ chromeStyle,
chromeUrl,
chromeButtonUrl,
hasFrame,
@@ -93,35 +95,42 @@ export function DeviceChrome({
useChromeProfile,
}: DeviceChromeProps) {
if (useChromeProfile) {
+ const useCssAndroidChrome = chromeStyle === "css-android";
return (
-
-
-
+ {useCssAndroidChrome ? (
+
+ ) : (
+ <>
+
+
+
+ >
+ )}
simdeck erase
```
-`list` returns the same simulator inventory the browser UI renders. Lifecycle commands return JSON and use the native bridge, preferring private CoreSimulator paths when available and falling back to `xcrun simctl`.
+`list` returns the same simulator inventory the browser UI renders, including
+Android AVDs as IDs like `android:SimDeck_Pixel_8_API_36`. iOS lifecycle commands use
+the native bridge, preferring private CoreSimulator paths when available and
+falling back to `xcrun simctl`. Android lifecycle commands use the Android SDK
+`emulator` and `adb` tools.
## Apps And URLs
```sh
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck uninstall com.example.App
simdeck launch com.example.App
simdeck open-url https://example.com
@@ -269,9 +274,13 @@ simdeck chrome-profile
`stream` writes Annex B H.264 samples to stdout and runs until interrupted, or
until `--frames` samples have been written. It is intended for diagnostics and
-external tools.
+external tools, and is iOS-only. Android live viewing in the browser uses the
+WebRTC H.264 endpoint; raw frames come from emulator gRPC and are encoded
+through VideoToolbox.
-`logs` fetches recent simulator logs. `chrome-profile` returns the CoreSimulator chrome layout used by the browser viewport.
+`logs` fetches recent simulator logs or Android `logcat` output. `chrome-profile`
+returns the CoreSimulator chrome layout for iOS and a screen-sized profile for
+Android.
## HTTP Fast Path
diff --git a/docs/extensions/browser-client.md b/docs/extensions/browser-client.md
index b9124c30..938d8184 100644
--- a/docs/extensions/browser-client.md
+++ b/docs/extensions/browser-client.md
@@ -44,7 +44,7 @@ client/
| `api/` | Typed wrappers around the SimDeck REST API and shared TypeScript types. |
| `features/simulators/` | Sidebar list of simulators plus boot/shutdown affordances. |
| `features/viewport/` | Frame canvas, chrome compositing, hit testing. |
-| `features/stream/` | WebRTC client, receiver stats, and video frame plumbing. |
+| `features/stream/` | WebRTC H.264 client for iOS and Android, receiver stats, and frame plumbing. |
| `features/input/` | Touch / keyboard / hardware-button affordances. |
| `features/accessibility/` | Accessibility tree pane and source switcher. |
| `features/toolbar/` | Top toolbar (rotate, home, app switcher, dark mode toggle, refresh). |
@@ -55,8 +55,8 @@ client/
2. `main.tsx` mounts the React tree at `#root`.
3. `AppShell` calls `GET /api/health` to learn the active encoder mode.
4. The simulator sidebar fetches `GET /api/simulators` and renders the list.
-5. Selecting a simulator posts an SDP offer to `/api/simulators//webrtc/offer`.
-6. The browser renders the H.264 video track through native WebRTC playback.
+5. Selecting a device posts an SDP offer to `/api/simulators//webrtc/offer`.
+6. The browser renders the H.264 video track through native WebRTC playback. Android emulator frames are sourced from emulator gRPC on the server and encoded through VideoToolbox before WebRTC delivery.
7. Touch and key events round-trip through `POST /api/simulators//touch` and `/key`.
## Dev workflow
diff --git a/docs/guide/architecture.md b/docs/guide/architecture.md
index 4468b188..641d4514 100644
--- a/docs/guide/architecture.md
+++ b/docs/guide/architecture.md
@@ -4,13 +4,13 @@ SimDeck is intentionally split into a small number of clearly-scoped layers. Eve
## High-level layout
-SimDeck has three layers stacked between the browser and the iOS Simulator:
+SimDeck has three layers stacked between the browser and the target device:
-1. **Browser / VS Code** runs the React client from `client/`. It speaks HTTP for control and WebRTC for live video, served by the Rust server.
+1. **Browser / VS Code** runs the React client from `client/`. It speaks HTTP for control and WebRTC H.264 for live video, served by the Rust server.
2. **The Rust server** (`server/`, built on `axum` + `tokio`) owns the CLI entrypoint, project daemon lifecycle, REST routes (`api/`), the stream transports (`transport/`), the inspector WebSocket hub (`inspector.rs`), the per-UDID session registry (`simulators/`), metrics, and log streaming.
-3. **The Objective-C bridge** (`cli/`) is reached through a narrow C ABI in `cli/native/XCWNativeBridge.*`. It wraps `xcrun simctl`, the private `CoreSimulator` direct-boot path, the per-session hardware/software H.264 encoder, the headless display bridge that produces frames and accepts HID input, and the device-chrome renderer.
+3. **Native device bridges** own platform-specific work. The Objective-C bridge (`cli/`) is reached through a narrow C ABI in `cli/native/XCWNativeBridge.*` for iOS. The Rust Android bridge (`server/src/android.rs`) shells out to the Android SDK for AVD discovery, emulator lifecycle, ADB input, screenshots, UIAutomator, and logcat.
-Underneath all of that is the iOS Simulator itself — `CoreSimulator` for lifecycle, `SimulatorKit` for chrome assets.
+Underneath all of that are the iOS Simulator (`CoreSimulator` and `SimulatorKit`) and the Android emulator (`emulator` and `adb`).
## Layer responsibilities
@@ -24,6 +24,7 @@ Key modules:
| ----------------------------------- | ---------------------------------------------------------------------------------------------------------- |
| `server/src/main.rs` | CLI entrypoint, project daemon management, AppKit main-thread shim, tokio runtime bootstrap. |
| `server/src/api/routes.rs` | Every `/api/*` route, including simulator control, accessibility, and inspector proxy. |
+| `server/src/android.rs` | Android AVD discovery, emulator lifecycle, emulator gRPC input/video, screenshots, UIAutomator, and logcat. |
| `server/src/transport/webrtc.rs` | WebRTC offer/answer endpoint for H.264 browser video. |
| `server/src/transport/packet.rs` | Shared encoded frame type used between simulator sessions and transports. |
| `server/src/inspector.rs` | WebSocket hub for the NativeScript runtime inspector. |
@@ -53,13 +54,16 @@ Inside the bridge:
### `client/` — React browser UI
-The React app served at `/` is a thin shell that calls the REST API and consumes live video over WebRTC H.264.
+The React app served at `/` is a thin shell that calls the REST API. It consumes
+live device video over WebRTC H.264. iOS frames come from the native simulator
+display bridge; Android frames come from emulator gRPC `streamScreenshot` and
+are encoded through VideoToolbox on the server.
Layout under `client/src/`:
- `app/AppShell.tsx` — top-level shell.
- `api/` — typed wrappers around `/api/*` (`client.ts`, `controls.ts`, `simulators.ts`, `types.ts`).
-- `features/stream/` — WebRTC client, receiver stats, and video frame plumbing.
+- `features/stream/` — WebRTC client, receiver stats, and frame plumbing.
- `features/viewport/` — frame canvas, hit testing, chrome compositing.
- `features/input/` — touch/keyboard/hardware button affordances.
- `features/accessibility/` — accessibility tree pane and source switcher.
@@ -85,7 +89,7 @@ Most control endpoints follow the same path: a typed Rust handler in `server/src
### Live video
-The browser posts an SDP offer to `/api/simulators/{udid}/webrtc/offer`. The handler in `transport::webrtc` ensures the per-UDID `SimulatorSession` is started, waits up to ~3 s for the first H.264 keyframe, returns an SDP answer, and writes the simulator frame source to a WebRTC video track.
+The browser posts an SDP offer to `/api/simulators/{udid}/webrtc/offer`. The handler in `transport::webrtc` starts the selected frame source, waits for the first H.264 keyframe, returns an SDP answer, and writes H.264 samples to a WebRTC video track. For Android, that source is emulator gRPC raw pixels passed through the shared VideoToolbox encoder path.
### Input
diff --git a/docs/guide/installation.md b/docs/guide/installation.md
index 43e33926..0fabaa30 100644
--- a/docs/guide/installation.md
+++ b/docs/guide/installation.md
@@ -10,6 +10,7 @@ SimDeck only runs on macOS. The native bridge links private `CoreSimulator` and
| ---------------------------------- | ------------------------------------------------------------------------------------ |
| **macOS 13+** | Required for current `CoreSimulator` and Apple's VideoToolbox H.264 encoder. |
| **Xcode + iOS Simulator runtimes** | The native bridge invokes `xcrun simctl` and the Simulator app. |
+| **Android SDK tools** | Optional. Required for Android emulator support (`emulator`, `adb`, and AVD images). |
| **Node.js ≥ 18** | The launcher (`bin/simdeck.mjs`) and the bundled client tooling. |
| **Rust (stable)** | Required only when building from source. Installed via [rustup](https://rustup.rs/). |
diff --git a/server/Cargo.lock b/server/Cargo.lock
index a0120b67..9c7c7db3 100644
--- a/server/Cargo.lock
+++ b/server/Cargo.lock
@@ -150,6 +150,28 @@ dependencies = [
"syn",
]
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "async-trait"
version = "0.1.89"
@@ -173,13 +195,40 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+[[package]]
+name = "axum"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
+dependencies = [
+ "async-trait",
+ "axum-core 0.4.5",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "itoa",
+ "matchit 0.7.3",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustversion",
+ "serde",
+ "sync_wrapper",
+ "tower 0.5.3",
+ "tower-layer",
+ "tower-service",
+]
+
[[package]]
name = "axum"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31b698c5f9a010f6573133b09e0de5408834d0c82f8d7475a89fc1867a71cd90"
dependencies = [
- "axum-core",
+ "axum-core 0.5.6",
"base64",
"bytes",
"form_urlencoded",
@@ -190,7 +239,7 @@ dependencies = [
"hyper",
"hyper-util",
"itoa",
- "matchit",
+ "matchit 0.8.4",
"memchr",
"mime",
"percent-encoding",
@@ -203,12 +252,32 @@ dependencies = [
"sync_wrapper",
"tokio",
"tokio-tungstenite",
- "tower",
+ "tower 0.5.3",
"tower-layer",
"tower-service",
"tracing",
]
+[[package]]
+name = "axum-core"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "rustversion",
+ "sync_wrapper",
+ "tower-layer",
+ "tower-service",
+]
+
[[package]]
name = "axum-core"
version = "0.5.6"
@@ -314,9 +383,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.2.60"
+version = "1.2.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43c5703da9466b66a946814e1adf53ea2c90f10063b86290cc9eb67ce3478a20"
+checksum = "a1dce859f0832a7d088c4f1119888ab94ef4b5d6795d1ce05afb7fe159d79f98"
dependencies = [
"find-msvc-tools",
"shlex",
@@ -486,9 +555,9 @@ dependencies = [
[[package]]
name = "data-encoding"
-version = "2.10.0"
+version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea"
+checksum = "a4ae5f15dda3c708c0ade84bfee31ccab44a3da4f88015ed22f63732abe300c8"
[[package]]
name = "der"
@@ -561,6 +630,12 @@ dependencies = [
"spki",
]
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
[[package]]
name = "elliptic-curve"
version = "0.13.8"
@@ -620,6 +695,12 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
[[package]]
name = "foldhash"
version = "0.1.5"
@@ -791,6 +872,31 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "h2"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "171fefbc92fe4a4de27e0698d6a5b392d6a0e333506bc49133760b3bcf948733"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http",
+ "indexmap 2.14.0",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -802,9 +908,9 @@ dependencies = [
[[package]]
name = "hashbrown"
-version = "0.17.0"
+version = "0.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51"
+checksum = "ed5909b6e89a2db4456e54cd5f673791d7eca6732202bbf2a9cc504fe2f9b84a"
[[package]]
name = "heck"
@@ -897,6 +1003,7 @@ dependencies = [
"bytes",
"futures-channel",
"futures-core",
+ "h2",
"http",
"http-body",
"httparse",
@@ -905,6 +1012,20 @@ dependencies = [
"pin-project-lite",
"smallvec",
"tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-timeout"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
+dependencies = [
+ "hyper",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
]
[[package]]
@@ -914,12 +1035,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
dependencies = [
"bytes",
+ "futures-channel",
+ "futures-util",
"http",
"http-body",
"hyper",
+ "libc",
"pin-project-lite",
+ "socket2 0.6.3",
"tokio",
"tower-service",
+ "tracing",
]
[[package]]
@@ -1023,14 +1149,24 @@ dependencies = [
[[package]]
name = "idna_adapter"
-version = "1.2.1"
+version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+checksum = "cb68373c0d6620ef8105e855e7745e18b0d00d3bdb07fb532e434244cdb9a714"
dependencies = [
"icu_normalizer",
"icu_properties",
]
+[[package]]
+name = "indexmap"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+dependencies = [
+ "autocfg",
+ "hashbrown 0.12.3",
+]
+
[[package]]
name = "indexmap"
version = "2.14.0"
@@ -1038,7 +1174,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9"
dependencies = [
"equivalent",
- "hashbrown 0.17.0",
+ "hashbrown 0.17.1",
"serde",
"serde_core",
]
@@ -1085,6 +1221,15 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
[[package]]
name = "itoa"
version = "1.0.18"
@@ -1093,10 +1238,12 @@ checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682"
[[package]]
name = "js-sys"
-version = "0.3.95"
+version = "0.3.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca"
+checksum = "67df7112613f8bfd9150013a0314e196f4800d3201ae742489d999db2f979f08"
dependencies = [
+ "cfg-if",
+ "futures-util",
"once_cell",
"wasm-bindgen",
]
@@ -1115,9 +1262,9 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "libc"
-version = "0.2.185"
+version = "0.2.186"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52ff2c0fe9bc6cb6b14a0592c2ff4fa9ceb83eea9db979b0487cd054946a2b8f"
+checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66"
[[package]]
name = "litemap"
@@ -1149,6 +1296,12 @@ dependencies = [
"regex-automata",
]
+[[package]]
+name = "matchit"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
+
[[package]]
name = "matchit"
version = "0.8.4"
@@ -1378,6 +1531,26 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+[[package]]
+name = "pin-project"
+version = "1.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbf0d9e68100b3a7989b4901972f265cd542e560a3a8a724e1e20322f4d06ce9"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a990e22f43e84855daf260dded30524ef4a9021cc7541c26540500a50b624389"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "pin-project-lite"
version = "0.2.17"
@@ -1407,7 +1580,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "092791278e026273c1b65bbdcfbba3a300f2994c896bd01ab01da613c29c46f1"
dependencies = [
"base64",
- "indexmap",
+ "indexmap 2.14.0",
"quick-xml",
"serde",
"time",
@@ -1483,6 +1656,29 @@ dependencies = [
"unicode-ident",
]
+[[package]]
+name = "prost"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
+dependencies = [
+ "bytes",
+ "prost-derive",
+]
+
+[[package]]
+name = "prost-derive"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
+dependencies = [
+ "anyhow",
+ "itertools",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "quick-xml"
version = "0.39.4"
@@ -1648,6 +1844,12 @@ dependencies = [
"windows-sys 0.52.0",
]
+[[package]]
+name = "roxmltree"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c20b6793b5c2fa6553b250154b78d6d0db37e72700ae35fad9387a46f487c97"
+
[[package]]
name = "rtcp"
version = "0.12.0"
@@ -1694,9 +1896,9 @@ dependencies = [
[[package]]
name = "rustls"
-version = "0.23.38"
+version = "0.23.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69f9466fb2c14ea04357e91413efb882e2a6d4a406e625449bc0a5d360d53a21"
+checksum = "ef86cd5876211988985292b91c96a8f2d298df24e75989a43a3c73f2d4d8168b"
dependencies = [
"once_cell",
"ring",
@@ -1708,9 +1910,9 @@ dependencies = [
[[package]]
name = "rustls-pki-types"
-version = "1.14.0"
+version = "1.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
+checksum = "30a7197ae7eb376e574fe940d068c30fe0462554a3ddbe4eca7838e049c937a9"
dependencies = [
"zeroize",
]
@@ -1904,7 +2106,7 @@ name = "simdeck-server"
version = "0.1.0"
dependencies = [
"anyhow",
- "axum",
+ "axum 0.8.9",
"base64",
"bytes",
"cc",
@@ -1914,6 +2116,8 @@ dependencies = [
"http",
"libc",
"plist",
+ "prost",
+ "roxmltree",
"serde",
"serde_json",
"sha2",
@@ -1921,6 +2125,7 @@ dependencies = [
"tokio",
"tokio-stream",
"tokio-tungstenite",
+ "tonic",
"tower-http",
"tracing",
"tracing-subscriber",
@@ -2144,9 +2349,9 @@ dependencies = [
[[package]]
name = "tokio"
-version = "1.52.1"
+version = "1.52.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b67dee974fe86fd92cc45b7a95fdd2f99a36a6d7b0d431a231178d3d670bbcc6"
+checksum = "8fc7f01b389ac15039e4dc9531aa973a135d7a4135281b12d7c1bc79fd57fffe"
dependencies = [
"bytes",
"libc",
@@ -2206,6 +2411,56 @@ dependencies = [
"tokio",
]
+[[package]]
+name = "tonic"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52"
+dependencies = [
+ "async-stream",
+ "async-trait",
+ "axum 0.7.9",
+ "base64",
+ "bytes",
+ "h2",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-timeout",
+ "hyper-util",
+ "percent-encoding",
+ "pin-project",
+ "prost",
+ "socket2 0.5.10",
+ "tokio",
+ "tokio-stream",
+ "tower 0.4.13",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "indexmap 1.9.3",
+ "pin-project",
+ "pin-project-lite",
+ "rand 0.8.6",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
[[package]]
name = "tower"
version = "0.5.3"
@@ -2224,9 +2479,9 @@ dependencies = [
[[package]]
name = "tower-http"
-version = "0.6.8"
+version = "0.6.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
+checksum = "68d6fdd9f81c2819c9a8b0e0cd91660e7746a8e6ea2ba7c6b2b057985f6bcb51"
dependencies = [
"bitflags 2.11.1",
"bytes",
@@ -2322,6 +2577,12 @@ dependencies = [
"tracing-log",
]
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
[[package]]
name = "tungstenite"
version = "0.29.0"
@@ -2455,6 +2716,15 @@ dependencies = [
"atomic-waker",
]
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
@@ -2481,9 +2751,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
-version = "0.2.118"
+version = "0.2.121"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89"
+checksum = "49ace1d07c165b0864824eee619580c4689389afa9dc9ed3a4c75040d82e6790"
dependencies = [
"cfg-if",
"once_cell",
@@ -2494,9 +2764,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
-version = "0.2.118"
+version = "0.2.121"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed"
+checksum = "8e68e6f4afd367a562002c05637acb8578ff2dea1943df76afb9e83d177c8578"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -2504,9 +2774,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
-version = "0.2.118"
+version = "0.2.121"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904"
+checksum = "d95a9ec35c64b2a7cb35d3fead40c4238d0940c86d107136999567a4703259f2"
dependencies = [
"bumpalo",
"proc-macro2",
@@ -2517,9 +2787,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
-version = "0.2.118"
+version = "0.2.121"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129"
+checksum = "c4e0100b01e9f0d03189a92b96772a1fb998639d981193d7dbab487302513441"
dependencies = [
"unicode-ident",
]
@@ -2541,7 +2811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
dependencies = [
"anyhow",
- "indexmap",
+ "indexmap 2.14.0",
"wasm-encoder",
"wasmparser",
]
@@ -2554,7 +2824,7 @@ checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags 2.11.1",
"hashbrown 0.15.5",
- "indexmap",
+ "indexmap 2.14.0",
"semver",
]
@@ -2911,7 +3181,7 @@ checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
dependencies = [
"anyhow",
"heck",
- "indexmap",
+ "indexmap 2.14.0",
"prettyplease",
"syn",
"wasm-metadata",
@@ -2942,7 +3212,7 @@ checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags 2.11.1",
- "indexmap",
+ "indexmap 2.14.0",
"log",
"serde",
"serde_derive",
@@ -2961,7 +3231,7 @@ checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
dependencies = [
"anyhow",
"id-arena",
- "indexmap",
+ "indexmap 2.14.0",
"log",
"semver",
"serde",
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 90270a9b..0a376bc6 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -15,6 +15,8 @@ hex = "0.4"
http = "1.1"
libc = "0.2"
plist = "1.7"
+prost = "0.13"
+roxmltree = "0.20"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10"
@@ -22,6 +24,7 @@ thiserror = "2.0"
tokio = { version = "1.42", features = ["fs", "io-util", "macros", "process", "rt-multi-thread", "signal", "sync", "time"] }
tokio-stream = "0.1"
tokio-tungstenite = "0.29"
+tonic = { version = "0.12", features = ["transport"] }
tower-http = { version = "0.6", features = ["cors", "fs", "trace"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
diff --git a/server/src/android.rs b/server/src/android.rs
new file mode 100644
index 00000000..05cf00f1
--- /dev/null
+++ b/server/src/android.rs
@@ -0,0 +1,1355 @@
+use crate::error::AppError;
+use bytes::BytesMut;
+use http::uri::PathAndQuery;
+use serde_json::{json, Value};
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::OsString;
+use std::future::Future;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::sync::{Mutex, OnceLock};
+use std::thread;
+use std::time::{Duration, Instant};
+use tonic::metadata::MetadataValue;
+use tonic::transport::{Channel, Endpoint};
+
+const ANDROID_ID_PREFIX: &str = "android:";
+const DEFAULT_GRPC_PORT_BASE: u16 = 8554;
+const DEFAULT_ANDROID_STREAM_MAX_EDGE: u32 = 960;
+const ANDROID_TOUCH_IDENTIFIER: i32 = 1;
+const RUNNING_EMULATOR_CACHE_TTL: Duration = Duration::from_secs(2);
+const AVD_GRPC_PORT_CACHE_TTL: Duration = Duration::from_secs(60);
+const SCREEN_SIZE_CACHE_TTL: Duration = Duration::from_secs(60);
+
+#[derive(Clone, Default)]
+pub struct AndroidBridge;
+
+#[derive(Clone, Debug)]
+pub struct AndroidDevice {
+ pub avd_name: String,
+ pub serial: Option,
+ pub is_booted: bool,
+ pub grpc_port: u16,
+}
+
+#[derive(Debug)]
+pub struct AndroidFrame {
+ pub width: u32,
+ pub height: u32,
+ pub seq: u32,
+ pub timestamp_us: u64,
+    pub rgba: Vec<u8>,
+}
+
+pub struct AndroidGrpcFrameStream {
+    inner: tonic::Streaming<grpc::Image>,
+}
+
+pub fn is_android_id(id: &str) -> bool {
+ id.starts_with(ANDROID_ID_PREFIX)
+}
+
+pub fn avd_from_id(id: &str) -> Result<String, AppError> {
+ id.strip_prefix(ANDROID_ID_PREFIX)
+ .filter(|value| !value.trim().is_empty())
+ .map(ToOwned::to_owned)
+ .ok_or_else(|| AppError::bad_request(format!("Invalid Android emulator id `{id}`.")))
+}
+
+pub fn id_for_avd(avd_name: &str) -> String {
+ format!("{ANDROID_ID_PREFIX}{avd_name}")
+}
+
+impl AndroidBridge {
+    pub fn list_devices(&self) -> Result<Vec<AndroidDevice>, AppError> {
+ if !self.emulator_path().exists() {
+ return Ok(Vec::new());
+ }
+
+ let avds = self
+ .run_emulator(["-list-avds"])?
+ .lines()
+ .map(str::trim)
+ .filter(|line| !line.is_empty())
+ .map(ToOwned::to_owned)
+            .collect::<Vec<_>>();
+ if avds.is_empty() {
+ return Ok(Vec::new());
+ }
+
+ let running = self.running_emulators().unwrap_or_default();
+ Ok(avds
+ .into_iter()
+ .enumerate()
+ .map(|(index, avd_name)| AndroidDevice {
+ serial: running.get(&avd_name).cloned(),
+ is_booted: running.contains_key(&avd_name),
+ grpc_port: DEFAULT_GRPC_PORT_BASE + index as u16,
+ avd_name,
+ })
+ .collect())
+ }
+
+    pub fn enrich_devices(&self, devices: Vec<AndroidDevice>) -> Vec<Value> {
+ devices
+ .into_iter()
+ .map(|device| self.device_value(device))
+ .collect()
+ }
+
+ pub fn boot(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ if self.resolve_serial(&avd_name).is_ok() {
+ return Ok(());
+ }
+ let grpc_port = self.grpc_port_for_avd(&avd_name)?;
+ Command::new(self.emulator_path())
+ .args([
+ "-avd",
+ &avd_name,
+ "-no-window",
+ "-no-audio",
+ "-gpu",
+ "swiftshader_indirect",
+ "-grpc",
+ &grpc_port.to_string(),
+ ])
+ .stdin(Stdio::null())
+ .stdout(Stdio::null())
+ .stderr(Stdio::null())
+ .spawn()
+ .map_err(|error| {
+ AppError::native(format!(
+ "Unable to start Android emulator `{avd_name}`: {error}"
+ ))
+ })?;
+ Ok(())
+ }
+
+ pub fn shutdown(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let serial = self.resolve_serial(&avd_name)?;
+ let _ = self.run_adb(["-s", &serial, "emu", "kill"])?;
+ Ok(())
+ }
+
+ pub fn erase(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ if self.resolve_serial(&avd_name).is_ok() {
+ return Err(AppError::bad_request(
+ "Shutdown the Android emulator before erasing it.",
+ ));
+ }
+ let avd_dir = self.avd_dir(&avd_name);
+ for file_name in [
+ "userdata-qemu.img",
+ "cache.img",
+ "data.img",
+ "sdcard.img",
+ "snapshots.img",
+ ] {
+ let path = avd_dir.join(file_name);
+ if path.exists() {
+ std::fs::remove_file(&path).map_err(|error| {
+ AppError::native(format!("Unable to remove {}: {error}", path.display()))
+ })?;
+ }
+ }
+ Ok(())
+ }
+
+ pub fn wait_until_booted(&self, id: &str, timeout_duration: Duration) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let deadline = Instant::now() + timeout_duration;
+ loop {
+ if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if self
+ .run_adb(["-s", &serial, "shell", "getprop", "sys.boot_completed"])
+ .unwrap_or_default()
+ .trim()
+ == "1"
+ {
+ return Ok(());
+ }
+ }
+ if Instant::now() >= deadline {
+ return Err(AppError::native(format!(
+ "Android emulator `{avd_name}` did not finish booting in time."
+ )));
+ }
+ thread::sleep(Duration::from_millis(500));
+ }
+ }
+
+    pub fn screenshot_png(&self, id: &str) -> Result<Vec<u8>, AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb_bytes(["-s", &serial, "exec-out", "screencap", "-p"])
+ }
+
+ pub fn install_app(&self, id: &str, app_path: &str) -> Result<(), AppError> {
+ if !app_path.ends_with(".apk") {
+ return Err(AppError::bad_request(
+ "Android install expects an `.apk` path.",
+ ));
+ }
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "install", "-r", app_path])?;
+ Ok(())
+ }
+
+ pub fn uninstall_app(&self, id: &str, package_name: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "uninstall", package_name])?;
+ Ok(())
+ }
+
+ pub fn open_url(&self, id: &str, url: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "am",
+ "start",
+ "-a",
+ "android.intent.action.VIEW",
+ "-d",
+ url,
+ ])?;
+ Ok(())
+ }
+
+ pub fn launch_package(&self, id: &str, package: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "monkey",
+ "-p",
+ package,
+ "-c",
+ "android.intent.category.LAUNCHER",
+ "1",
+ ])?;
+ Ok(())
+ }
+
+ pub fn set_pasteboard_text(&self, id: &str, text: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb_shell(&serial, &format!("cmd clipboard set {}", shell_quote(text)))?;
+ Ok(())
+ }
+
+    pub fn pasteboard_text(&self, id: &str) -> Result<String, AppError> {
+ let serial = self.serial_for_id(id)?;
+ Ok(self.run_adb_shell(&serial, "cmd clipboard get")?)
+ }
+
+ pub fn send_touch(&self, id: &str, x: f64, y: f64, phase: &str) -> Result<(), AppError> {
+ if self.send_touch_grpc(id, x, y, phase).is_ok() {
+ return Ok(());
+ }
+ if phase != "ended" && phase != "cancelled" {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let px = (x.clamp(0.0, 1.0) * (width - 1.0)).round().max(0.0);
+ let py = (y.clamp(0.0, 1.0) * (height - 1.0)).round().max(0.0);
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "tap",
+ &px.to_string(),
+ &py.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn send_swipe(
+ &self,
+ id: &str,
+ start_x: f64,
+ start_y: f64,
+ end_x: f64,
+ end_y: f64,
+ duration_ms: u64,
+ ) -> Result<(), AppError> {
+ if self
+ .send_swipe_grpc(id, start_x, start_y, end_x, end_y, duration_ms)
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let coords = [start_x, start_y, end_x, end_y]
+ .into_iter()
+ .enumerate()
+ .map(|(index, value)| {
+ let max = if index % 2 == 0 {
+ width - 1.0
+ } else {
+ height - 1.0
+ };
+ (value.clamp(0.0, 1.0) * max).round().max(0.0).to_string()
+ })
+            .collect::<Vec<_>>();
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "swipe",
+ &coords[0],
+ &coords[1],
+ &coords[2],
+ &coords[3],
+ &duration_ms.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn send_key(&self, id: &str, key_code: u16, _modifiers: u32) -> Result<(), AppError> {
+ if self
+ .send_key_grpc(id, grpc::KeyboardEvent::usb_keypress(i32::from(key_code)))
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let android_key = android_key_code(key_code);
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "keyevent",
+ &android_key.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn type_text(&self, id: &str, text: &str) -> Result<(), AppError> {
+ if self
+ .send_key_grpc(id, grpc::KeyboardEvent::text(text.to_owned()))
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let escaped = text.replace('%', "%25").replace(' ', "%s");
+ self.run_adb(["-s", &serial, "shell", "input", "text", &escaped])?;
+ Ok(())
+ }
+
+ pub fn press_home(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "3"])?;
+ Ok(())
+ }
+
+ pub fn open_app_switcher(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "187"])?;
+ Ok(())
+ }
+
+ pub fn press_button(&self, id: &str, button: &str, duration_ms: u32) -> Result<(), AppError> {
+ match button {
+ "home" => self.press_home(id),
+ "lock" | "side-button" => {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "26"])?;
+ if duration_ms > 500 {
+ thread::sleep(Duration::from_millis(u64::from(duration_ms)));
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "26"])?;
+ }
+ Ok(())
+ }
+ "back" => {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "4"])?;
+ Ok(())
+ }
+ _ => Err(AppError::bad_request(format!(
+ "Unsupported Android hardware button `{button}`."
+ ))),
+ }
+ }
+
+ pub fn rotate_right(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "emu", "rotate"])?;
+ Ok(())
+ }
+
+ pub fn toggle_appearance(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ let current = self.run_adb_shell(&serial, "cmd uimode night")?;
+ let mode = if current.to_lowercase().contains("yes") {
+ "no"
+ } else {
+ "yes"
+ };
+ self.run_adb(["-s", &serial, "shell", "cmd", "uimode", "night", mode])?;
+ Ok(())
+ }
+
+    pub fn logs(&self, id: &str, limit: usize) -> Result<Vec<Value>, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let raw = self.run_adb([
+ "-s",
+ &serial,
+ "logcat",
+ "-d",
+ "-v",
+ "threadtime",
+ "-t",
+ &limit.max(1).to_string(),
+ ])?;
+ Ok(raw
+ .lines()
+ .map(|line| {
+ json!({
+ "timestamp": "",
+ "level": android_log_level(line),
+ "process": "",
+ "pid": Value::Null,
+ "subsystem": "android",
+ "category": "logcat",
+ "message": line,
+ })
+ })
+ .collect())
+ }
+
+    pub fn chrome_profile(&self, id: &str) -> Result<Value, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let horizontal_bezel = (width * 0.055).clamp(48.0, 80.0);
+ let vertical_bezel = (height * 0.04).clamp(64.0, 104.0);
+ Ok(json!({
+ "totalWidth": width + horizontal_bezel * 2.0,
+ "totalHeight": height + vertical_bezel * 2.0,
+ "screenX": horizontal_bezel,
+ "screenY": vertical_bezel,
+ "screenWidth": width,
+ "screenHeight": height,
+ "cornerRadius": (width * 0.055).clamp(32.0, 56.0),
+ "hasScreenMask": false,
+ "chromeStyle": "css-android",
+ }))
+ }
+
+ pub async fn grpc_frame_stream(
+ &self,
+ id: &str,
+        max_edge: Option<u32>,
+    ) -> Result<AndroidGrpcFrameStream, AppError> {
+ let avd_name = avd_from_id(id)?;
+ let port = self.grpc_port_for_avd(&avd_name)?;
+ let mut format = grpc::ImageFormat {
+ format: grpc::image_format::ImgFormat::Rgba8888 as i32,
+ width: 0,
+ height: 0,
+ display: 0,
+ transport: None,
+ };
+ if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if let Ok((width, height)) = self.screen_size_for_serial(&serial) {
+ let max_edge = max_edge
+ .unwrap_or(DEFAULT_ANDROID_STREAM_MAX_EDGE)
+ .clamp(240, 2400) as f64;
+ let largest = width.max(height);
+ if largest > max_edge {
+ let scale = max_edge / largest;
+ format.width = (width * scale).round().max(1.0) as u32;
+ format.height = (height * scale).round().max(1.0) as u32;
+ }
+ }
+ }
+
+ let endpoint = Endpoint::from_shared(format!("http://127.0.0.1:{port}"))
+ .map_err(|error| AppError::native(format!("Invalid Android gRPC endpoint: {error}")))?
+ .connect()
+ .await
+ .map_err(|error| {
+ AppError::native(format!(
+ "Unable to connect to Android emulator gRPC: {error}"
+ ))
+ })?;
+ let mut grpc = tonic::client::Grpc::new(endpoint);
+ grpc.ready().await.map_err(|error| {
+ AppError::native(format!("Android emulator gRPC is not ready: {error}"))
+ })?;
+ let path = PathAndQuery::from_static(
+ "/android.emulation.control.EmulatorController/streamScreenshot",
+ );
+ let mut request = tonic::Request::new(format);
+ if let Some(token) = emulator_grpc_token(port) {
+ let value = MetadataValue::try_from(format!("Bearer {token}")).map_err(|error| {
+ AppError::native(format!("Invalid Android emulator gRPC token: {error}"))
+ })?;
+ request.metadata_mut().insert("authorization", value);
+ }
+ let response = grpc
+ .server_streaming(request, path, tonic::codec::ProstCodec::default())
+ .await
+ .map_err(|error| {
+ AppError::native(format!(
+ "Android emulator screenshot stream failed: {error}"
+ ))
+ })?;
+ Ok(AndroidGrpcFrameStream {
+ inner: response.into_inner(),
+ })
+ }
+
+ pub fn accessibility_tree(
+ &self,
+ id: &str,
+        max_depth: Option<usize>,
+    ) -> Result<Value, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let raw = self.run_adb_shell(
+ &serial,
+ "uiautomator dump /sdcard/simdeck_ui.xml >/dev/null && cat /sdcard/simdeck_ui.xml",
+ )?;
+ let xml = extract_xml(&raw);
+ let document = roxmltree::Document::parse(xml).map_err(|error| {
+ AppError::native(format!("Unable to parse UIAutomator XML: {error}"))
+ })?;
+ let mut roots = Vec::new();
+ let root = document.root_element();
+ let max_depth = max_depth.unwrap_or(80).min(80);
+ for child in root.children().filter(|node| node.has_tag_name("node")) {
+ roots.push(android_node_value(child, 0, max_depth));
+ }
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ if roots.is_empty() {
+ roots.push(json!({
+ "type": "screen",
+ "role": "screen",
+ "frame": frame_value(0.0, 0.0, width, height),
+ "children": [],
+ }));
+ }
+ Ok(json!({
+ "source": "android-uiautomator",
+ "availableSources": ["android-uiautomator"],
+ "roots": roots,
+ }))
+ }
+
+ fn send_touch_grpc(&self, id: &str, x: f64, y: f64, phase: &str) -> Result<(), AppError> {
+ self.block_on_grpc(self.send_touch_grpc_async(id, x, y, phase))
+ }
+
+ async fn send_touch_grpc_async(
+ &self,
+ id: &str,
+ x: f64,
+ y: f64,
+ phase: &str,
+ ) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let serial = self.resolve_serial(&avd_name)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let pressure = match phase {
+ "began" | "moved" => 1,
+ "ended" | "cancelled" => 0,
+ _ => return Ok(()),
+ };
+ let event = grpc::TouchEvent {
+ touches: vec![grpc::Touch {
+ x: normalized_to_pixel(x, width),
+ y: normalized_to_pixel(y, height),
+ identifier: ANDROID_TOUCH_IDENTIFIER,
+ pressure,
+ touch_major: 8,
+ touch_minor: 8,
+ expiration: grpc::touch::EventExpiration::NeverExpire as i32,
+ orientation: 0,
+ }],
+ display: 0,
+ };
+ self.grpc_unary_for_avd::(
+ &avd_name,
+ "/android.emulation.control.EmulatorController/sendTouch",
+ event,
+ )
+ .await?;
+ Ok(())
+ }
+
+ fn send_swipe_grpc(
+ &self,
+ id: &str,
+ start_x: f64,
+ start_y: f64,
+ end_x: f64,
+ end_y: f64,
+ duration_ms: u64,
+ ) -> Result<(), AppError> {
+ let duration_ms = duration_ms.clamp(50, 1500);
+ let steps = (duration_ms / 8).clamp(4, 120);
+ self.send_touch_grpc(id, start_x, start_y, "began")?;
+ for step in 1..steps {
+ let t = step as f64 / steps as f64;
+ self.send_touch_grpc(
+ id,
+ start_x + (end_x - start_x) * t,
+ start_y + (end_y - start_y) * t,
+ "moved",
+ )?;
+ thread::sleep(Duration::from_millis((duration_ms / steps).max(1)));
+ }
+ self.send_touch_grpc(id, end_x, end_y, "ended")
+ }
+
+ fn send_key_grpc(&self, id: &str, event: grpc::KeyboardEvent) -> Result<(), AppError> {
+ self.block_on_grpc(async {
+ let avd_name = avd_from_id(id)?;
+ self.grpc_unary_for_avd::(
+ &avd_name,
+ "/android.emulation.control.EmulatorController/sendKey",
+ event,
+ )
+ .await?;
+ Ok(())
+ })
+ }
+
+    fn block_on_grpc<T, F>(&self, future: F) -> Result<T, AppError>
+    where
+        F: Future<Output = Result<T, AppError>>,
+ {
+ if let Ok(handle) = tokio::runtime::Handle::try_current() {
+ return handle.block_on(future);
+ }
+ tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()
+ .map_err(|error| AppError::internal(format!("Unable to create gRPC runtime: {error}")))?
+ .block_on(future)
+ }
+
+    async fn grpc_unary_for_avd<Req, Resp>(
+        &self,
+        avd_name: &str,
+        path: &'static str,
+        request: Req,
+    ) -> Result<Resp, AppError>
+ where
+ Req: prost::Message + Default + Send + 'static,
+ Resp: prost::Message + Default + Send + 'static,
+ {
+ let port = self.grpc_port_for_avd(avd_name)?;
+ let channel = grpc_channel_for_port(port)?;
+ let mut grpc = tonic::client::Grpc::new(channel);
+ grpc.ready().await.map_err(|error| {
+ AppError::native(format!("Android emulator gRPC is not ready: {error}"))
+ })?;
+ let mut request = tonic::Request::new(request);
+ if let Some(token) = emulator_grpc_token(port) {
+ let value = MetadataValue::try_from(format!("Bearer {token}")).map_err(|error| {
+ AppError::native(format!("Invalid Android emulator gRPC token: {error}"))
+ })?;
+ request.metadata_mut().insert("authorization", value);
+ }
+ let response = grpc
+ .unary(
+ request,
+ PathAndQuery::from_static(path),
+ tonic::codec::ProstCodec::default(),
+ )
+ .await
+ .map_err(|error| {
+ AppError::native(format!("Android emulator gRPC input failed: {error}"))
+ })?;
+ Ok(response.into_inner())
+ }
+
+ fn device_value(&self, device: AndroidDevice) -> Value {
+ let id = id_for_avd(&device.avd_name);
+ let private_display = if let Some(serial) = device.serial.as_deref() {
+ let (width, height) = self.screen_size_for_serial(serial).unwrap_or((0.0, 0.0));
+ json!({
+ "displayReady": width > 0.0 && height > 0.0,
+ "displayStatus": "Ready",
+ "displayWidth": width,
+ "displayHeight": height,
+ "frameSequence": 0,
+ "rotationQuarterTurns": 0,
+ })
+ } else {
+ json!({
+ "displayReady": false,
+ "displayStatus": "Boot required",
+ "displayWidth": 0,
+ "displayHeight": 0,
+ "frameSequence": 0,
+ "rotationQuarterTurns": 0,
+ })
+ };
+ json!({
+ "udid": id,
+ "id": id,
+ "platform": "android-emulator",
+ "name": device.avd_name,
+ "state": if device.is_booted { "Booted" } else { "Shutdown" },
+ "isBooted": device.is_booted,
+ "isAvailable": true,
+ "lastBootedAt": Value::Null,
+ "dataPath": self.avd_dir(&device.avd_name),
+ "logPath": Value::Null,
+ "deviceTypeIdentifier": "android-emulator",
+ "deviceTypeName": "Android Emulator",
+ "runtimeIdentifier": "android",
+ "runtimeName": "Android",
+ "android": {
+ "avdName": device.avd_name,
+ "serial": device.serial,
+ "grpcPort": device.grpc_port,
+ },
+ "privateDisplay": private_display,
+ })
+ }
+
+    fn serial_for_id(&self, id: &str) -> Result<String, AppError> {
+ self.resolve_serial(&avd_from_id(id)?)
+ }
+
+    fn resolve_serial(&self, avd_name: &str) -> Result<String, AppError> {
+ self.running_emulators()?.remove(avd_name).ok_or_else(|| {
+ AppError::native(format!("Android emulator `{avd_name}` is not running."))
+ })
+ }
+
+    fn running_emulators(&self) -> Result<HashMap<String, String>, AppError> {
+        static CACHE: OnceLock<Mutex<Option<(Instant, HashMap<String, String>)>>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(None));
+ if let Some((updated_at, running)) = cache.lock().unwrap().as_ref() {
+ if updated_at.elapsed() < RUNNING_EMULATOR_CACHE_TTL {
+ return Ok(running.clone());
+ }
+ }
+ if !self.adb_path().exists() {
+ return Ok(HashMap::new());
+ }
+ let output = self.run_adb(["devices"])?;
+ let mut result = HashMap::new();
+ for line in output.lines().skip(1) {
+ let mut parts = line.split_whitespace();
+ let Some(serial) = parts.next() else { continue };
+ let Some(state) = parts.next() else { continue };
+ if state != "device" || !serial.starts_with("emulator-") {
+ continue;
+ }
+ if let Ok(name_output) = self.run_adb(["-s", serial, "emu", "avd", "name"]) {
+ if let Some(name) = name_output
+ .lines()
+ .map(str::trim)
+ .find(|line| !line.is_empty() && *line != "OK")
+ {
+ result.insert(name.to_owned(), serial.to_owned());
+ }
+ }
+ }
+ *cache.lock().unwrap() = Some((Instant::now(), result.clone()));
+ Ok(result)
+ }
+
+    fn grpc_port_for_avd(&self, avd_name: &str) -> Result<u16, AppError> {
+        static CACHE: OnceLock<Mutex<Option<(Instant, HashMap<String, u16>)>>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(None));
+ if let Some((updated_at, ports)) = cache.lock().unwrap().as_ref() {
+ if updated_at.elapsed() < AVD_GRPC_PORT_CACHE_TTL {
+ if let Some(port) = ports.get(avd_name) {
+ return Ok(*port);
+ }
+ }
+ }
+
+ let ports = self
+ .run_emulator(["-list-avds"])?
+ .lines()
+ .map(str::trim)
+ .filter(|line| !line.is_empty())
+ .enumerate()
+ .map(|(index, name)| (name.to_owned(), DEFAULT_GRPC_PORT_BASE + index as u16))
+            .collect::<HashMap<_, _>>();
+ let port = ports
+ .get(avd_name)
+ .copied()
+ .ok_or_else(|| AppError::not_found(format!("Unknown Android AVD `{avd_name}`.")))?;
+ *cache.lock().unwrap() = Some((Instant::now(), ports));
+ Ok(port)
+ }
+
+ fn screen_size_for_serial(&self, serial: &str) -> Result<(f64, f64), AppError> {
+        static CACHE: OnceLock<Mutex<HashMap<String, (Instant, (f64, f64))>>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new()));
+ if let Some((updated_at, size)) = cache.lock().unwrap().get(serial) {
+ if updated_at.elapsed() < SCREEN_SIZE_CACHE_TTL {
+ return Ok(*size);
+ }
+ }
+ let output = self.run_adb(["-s", serial, "shell", "wm", "size"])?;
+ let size = output
+ .split_whitespace()
+ .find(|part| part.contains('x'))
+ .ok_or_else(|| AppError::native("Android emulator did not report a screen size."))?;
+ let (width, height) = size
+ .split_once('x')
+ .ok_or_else(|| AppError::native("Android emulator reported an invalid screen size."))?;
+ let width = width
+ .parse::()
+ .map_err(|_| AppError::native("Android emulator reported an invalid width."))?;
+ let height = height
+ .parse::()
+ .map_err(|_| AppError::native("Android emulator reported an invalid height."))?;
+ cache
+ .lock()
+ .unwrap()
+ .insert(serial.to_owned(), (Instant::now(), (width, height)));
+ Ok((width, height))
+ }
+
+ fn run_adb_shell(&self, serial: &str, script: &str) -> Result {
+ self.run_adb(["-s", serial, "shell", script])
+ }
+
+ fn run_adb(&self, args: [&str; N]) -> Result {
+ run_command_text(self.adb_path(), args)
+ }
+
+ fn run_adb_bytes(&self, args: [&str; N]) -> Result, AppError> {
+ run_command_bytes(self.adb_path(), args)
+ }
+
+ fn run_emulator(&self, args: [&str; N]) -> Result {
+ run_command_text(self.emulator_path(), args)
+ }
+
    /// Path to the `adb` binary inside the resolved Android SDK.
    fn adb_path(&self) -> PathBuf {
        sdk_root().join("platform-tools/adb")
    }
+
    /// Path to the `emulator` binary inside the resolved Android SDK.
    fn emulator_path(&self) -> PathBuf {
        sdk_root().join("emulator/emulator")
    }
+
    /// Data directory for `avd_name` under `~/.android/avd`.
    fn avd_dir(&self, avd_name: &str) -> PathBuf {
        home_dir().join(format!(".android/avd/{avd_name}.avd"))
    }
+}
+
+impl AndroidGrpcFrameStream {
+ pub async fn next_frame(&mut self) -> Result, AppError> {
+ let Some(image) = self.inner.message().await.map_err(|error| {
+ AppError::native(format!(
+ "Android emulator screenshot stream failed: {error}"
+ ))
+ })?
+ else {
+ return Ok(None);
+ };
+ let format = image.format.ok_or_else(|| {
+ AppError::native("Android emulator screenshot did not include an image format.")
+ })?;
+ let width = if format.width > 0 {
+ format.width
+ } else {
+ image.width
+ };
+ let height = if format.height > 0 {
+ format.height
+ } else {
+ image.height
+ };
+ if width == 0 || height == 0 {
+ return Err(AppError::native(
+ "Android emulator screenshot did not include dimensions.",
+ ));
+ }
+ let rgba = rgba_display_order(
+ &image.image,
+ width,
+ height,
+ grpc::image_format::ImgFormat::try_from(format.format)
+ .unwrap_or(grpc::image_format::ImgFormat::Rgba8888),
+ )?;
+ Ok(Some(AndroidFrame {
+ width,
+ height,
+ seq: image.seq,
+ timestamp_us: image.timestamp_us,
+ rgba,
+ }))
+ }
+}
+
+fn run_command_text(program: PathBuf, args: [&str; N]) -> Result {
+ let output = run_command(program, args)?;
+ String::from_utf8(output)
+ .map_err(|error| AppError::native(format!("Command returned non-UTF8 output: {error}")))
+}
+
+fn run_command_bytes(
+ program: PathBuf,
+ args: [&str; N],
+) -> Result, AppError> {
+ run_command(program, args)
+}
+
+fn run_command(program: PathBuf, args: [&str; N]) -> Result, AppError> {
+ if !program.exists() {
+ return Err(AppError::native(format!(
+ "Android SDK binary not found at {}.",
+ program.display()
+ )));
+ }
+ let output = Command::new(&program)
+ .args(args)
+ .env("ANDROID_HOME", sdk_root())
+ .env("ANDROID_SDK_ROOT", sdk_root())
+ .env("JAVA_HOME", java_home())
+ .output()
+ .map_err(|error| {
+ AppError::native(format!("Unable to run {}: {error}", program.display()))
+ })?;
+ if output.status.success() {
+ return Ok(output.stdout);
+ }
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ let stdout = String::from_utf8_lossy(&output.stdout);
+ Err(AppError::native(format!(
+ "{} failed: {}{}",
+ program
+ .file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or("Android command"),
+ stderr.trim(),
+ if stdout.trim().is_empty() {
+ String::new()
+ } else {
+ format!(" {}", stdout.trim())
+ }
+ )))
+}
+
+fn grpc_channel_for_port(port: u16) -> Result {
+ static CHANNELS: OnceLock>> = OnceLock::new();
+ let channels = CHANNELS.get_or_init(|| Mutex::new(HashMap::new()));
+ let mut channels = channels.lock().unwrap();
+ if let Some(channel) = channels.get(&port) {
+ return Ok(channel.clone());
+ }
+ let endpoint = Endpoint::from_shared(format!("http://127.0.0.1:{port}"))
+ .map_err(|error| AppError::native(format!("Invalid Android gRPC endpoint: {error}")))?;
+ let channel = endpoint.connect_lazy();
+ channels.insert(port, channel.clone());
+ Ok(channel)
+}
+
/// Maps a normalized [0, 1] coordinate onto a pixel index in [0, extent - 1].
/// Degenerate extents (<= 1.0) collapse to pixel 0.
fn normalized_to_pixel(value: f64, extent: f64) -> i32 {
    let span = (extent - 1.0).max(0.0);
    let pixel = value.clamp(0.0, 1.0) * span;
    pixel.round().max(0.0) as i32
}
+
+fn sdk_root() -> PathBuf {
+ env::var_os("ANDROID_HOME")
+ .or_else(|| env::var_os("ANDROID_SDK_ROOT"))
+ .map(PathBuf::from)
+ .filter(|path| path.exists())
+ .unwrap_or_else(|| home_dir().join("Library/Android/sdk"))
+}
+
/// JDK location forwarded to SDK tools: `JAVA_HOME` if set, else the
/// Homebrew OpenJDK default.
fn java_home() -> OsString {
    match env::var_os("JAVA_HOME") {
        Some(configured) => configured,
        None => OsString::from("/opt/homebrew/opt/openjdk"),
    }
}
+
/// The current user's home directory from `$HOME`, falling back to `/`.
fn home_dir() -> PathBuf {
    match env::var_os("HOME") {
        Some(home) => PathBuf::from(home),
        None => PathBuf::from("/"),
    }
}
+
+fn emulator_grpc_token(port: u16) -> Option {
+ per_instance_grpc_token(port).or_else(global_grpc_token)
+}
+
+fn per_instance_grpc_token(port: u16) -> Option {
+ let running_dir = home_dir().join("Library/Caches/TemporaryItems/avd/running");
+ let entries = std::fs::read_dir(running_dir).ok()?;
+ let port_value = port.to_string();
+ for entry in entries.flatten() {
+ let path = entry.path();
+ if path.extension().and_then(|ext| ext.to_str()) != Some("ini") {
+ continue;
+ }
+ let contents = std::fs::read_to_string(path).ok()?;
+ let fields = parse_ini(&contents);
+ if fields.get("grpc.port") == Some(&port_value) {
+ if let Some(token) = fields.get("grpc.token").filter(|token| !token.is_empty()) {
+ return Some(token.to_owned());
+ }
+ }
+ }
+ None
+}
+
+fn global_grpc_token() -> Option {
+ std::fs::read_to_string(home_dir().join(".emulator_console_auth_token"))
+ .ok()
+ .map(|token| token.trim().to_owned())
+ .filter(|token| !token.is_empty())
+}
+
/// Parses simple `key=value` ini contents into a map; lines without `=`
/// (including comments and section headers) are ignored, keys and values
/// are trimmed.
fn parse_ini(contents: &str) -> HashMap<String, String> {
    contents
        .lines()
        .filter_map(|line| {
            let line = line.trim();
            let (key, value) = line.split_once('=')?;
            Some((key.trim().to_owned(), value.trim().to_owned()))
        })
        .collect()
}
+
+fn rgba_display_order(
+ image: &[u8],
+ width: u32,
+ height: u32,
+ format: grpc::image_format::ImgFormat,
+) -> Result, AppError> {
+ let width = width as usize;
+ let height = height as usize;
+ match format {
+ grpc::image_format::ImgFormat::Rgba8888 => {
+ let row_len = width * 4;
+ if image.len() < row_len * height {
+ return Err(AppError::native(
+ "Android emulator returned a truncated RGBA frame.",
+ ));
+ }
+ Ok(image[..row_len * height].to_vec())
+ }
+ grpc::image_format::ImgFormat::Rgb888 => {
+ let src_row_len = width * 3;
+ if image.len() < src_row_len * height {
+ return Err(AppError::native(
+ "Android emulator returned a truncated RGB frame.",
+ ));
+ }
+ let mut out = BytesMut::with_capacity(width * height * 4);
+ out.resize(width * height * 4, 255);
+ for y in 0..height {
+ let src_row = y * src_row_len;
+ let dst_row = y * width * 4;
+ for x in 0..width {
+ let src = src_row + x * 3;
+ let dst = dst_row + x * 4;
+ out[dst] = image[src];
+ out[dst + 1] = image[src + 1];
+ out[dst + 2] = image[src + 2];
+ out[dst + 3] = 255;
+ }
+ }
+ Ok(out.to_vec())
+ }
+ grpc::image_format::ImgFormat::Png => Err(AppError::native(
+ "Android emulator gRPC returned PNG instead of raw pixels.",
+ )),
+ }
+}
+
+fn extract_xml(output: &str) -> &str {
+ output
+ .find(", depth: usize, max_depth: usize) -> Value {
+ let bounds = parse_bounds(node.attribute("bounds").unwrap_or(""));
+ let class_name = node.attribute("class").unwrap_or("");
+ let short_class = class_name.rsplit('.').next().unwrap_or(class_name);
+ let text = node.attribute("text").unwrap_or("");
+ let content_desc = node.attribute("content-desc").unwrap_or("");
+ let label = if !text.is_empty() { text } else { content_desc };
+ let mut children = Vec::new();
+ if depth < max_depth {
+ for child in node.children().filter(|child| child.has_tag_name("node")) {
+ children.push(android_node_value(child, depth + 1, max_depth));
+ }
+ }
+ json!({
+ "source": "android-uiautomator",
+ "type": map_android_class(short_class),
+ "role": map_android_class(short_class),
+ "className": class_name,
+ "AXIdentifier": node.attribute("resource-id").unwrap_or(""),
+ "AXLabel": label,
+ "AXValue": text,
+ "text": text,
+ "title": label,
+ "enabled": node.attribute("enabled") == Some("true"),
+ "isHidden": node.attribute("visible-to-user") == Some("false"),
+ "frame": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
+ "frameInScreen": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
+ "children": children,
+ })
+}
+
/// Parses a UIAutomator bounds attribute like `[left,top][right,bottom]`
/// into `(x, y, width, height)`. Malformed input yields all zeros; negative
/// extents are clamped to zero.
fn parse_bounds(value: &str) -> (f64, f64, f64, f64) {
    let numbers = value
        .replace("][", ",")
        .replace(['[', ']'], "")
        .split(',')
        .filter_map(|part| part.parse::<f64>().ok())
        .collect::<Vec<_>>();
    if numbers.len() != 4 {
        return (0.0, 0.0, 0.0, 0.0);
    }
    (
        numbers[0],
        numbers[1],
        (numbers[2] - numbers[0]).max(0.0),
        (numbers[3] - numbers[1]).max(0.0),
    )
}
+
/// Builds the `{x, y, width, height}` JSON rect used by the inspector payloads.
fn frame_value(x: f64, y: f64, width: f64, height: f64) -> Value {
    json!({ "x": x, "y": y, "width": width, "height": height })
}
+
/// Maps an unqualified Android widget class name onto the iOS-style role
/// vocabulary the inspector UI shares across platforms; anything
/// unrecognized becomes "other".
fn map_android_class(class_name: &str) -> &'static str {
    match class_name {
        // Text entry and display
        "EditText" => "textField",
        "TextView" => "staticText",
        // Tappable controls
        "Button" | "ImageButton" | "FloatingActionButton" => "button",
        "CheckBox" => "checkBox",
        "RadioButton" => "radioButton",
        "Switch" | "ToggleButton" => "switch",
        "SeekBar" => "slider",
        // Containers and media
        "ImageView" => "image",
        "RecyclerView" | "ListView" => "table",
        "ScrollView" | "HorizontalScrollView" | "NestedScrollView" => "scrollView",
        "WebView" => "webView",
        _ => "other",
    }
}
+
/// Translates a USB HID usage ID into the corresponding Android key code.
/// HID codes with no special mapping pass through unchanged — NOTE(review):
/// that pass-through presumably relies on the client sending Android codes
/// directly for other keys; confirm against the input pipeline.
fn android_key_code(hid: u16) -> u16 {
    const MAP: [(u16, u16); 9] = [
        (40, 66),  // Enter     -> AKEYCODE_ENTER
        (41, 111), // Escape    -> AKEYCODE_ESCAPE
        (42, 67),  // Backspace -> AKEYCODE_DEL
        (43, 61),  // Tab       -> AKEYCODE_TAB
        (44, 62),  // Space     -> AKEYCODE_SPACE
        (79, 22),  // Right     -> AKEYCODE_DPAD_RIGHT
        (80, 21),  // Left      -> AKEYCODE_DPAD_LEFT
        (81, 20),  // Down      -> AKEYCODE_DPAD_DOWN
        (82, 19),  // Up        -> AKEYCODE_DPAD_UP
    ];
    MAP.iter()
        .find(|(usb, _)| *usb == hid)
        .map(|(_, android)| *android)
        .unwrap_or(hid)
}
+
/// Infers a log level from a logcat line by its priority-letter column.
/// Checked in severity order (E, W, D); anything else is "info".
fn android_log_level(line: &str) -> &'static str {
    for (marker, level) in [(" E ", "error"), (" W ", "warning"), (" D ", "debug")] {
        if line.contains(marker) {
            return level;
        }
    }
    "info"
}
+
/// Single-quotes `value` for a POSIX shell, escaping embedded single quotes
/// with the standard `'\''` close-escape-reopen sequence.
fn shell_quote(value: &str) -> String {
    let mut quoted = String::with_capacity(value.len() + 2);
    quoted.push('\'');
    for ch in value.chars() {
        if ch == '\'' {
            quoted.push_str("'\\''");
        } else {
            quoted.push(ch);
        }
    }
    quoted.push('\'');
    quoted
}
+
/// Removes duplicates while preserving first-occurrence order.
#[allow(dead_code)]
fn _dedupe<T: Clone + Eq + std::hash::Hash>(values: impl IntoIterator<Item = T>) -> Vec<T> {
    let mut seen = HashSet::new();
    values
        .into_iter()
        .filter(|value| seen.insert(value.clone()))
        .collect()
}
+
+mod grpc {
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Empty {}
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Touch {
+ #[prost(int32, tag = "1")]
+ pub x: i32,
+ #[prost(int32, tag = "2")]
+ pub y: i32,
+ #[prost(int32, tag = "3")]
+ pub identifier: i32,
+ #[prost(int32, tag = "4")]
+ pub pressure: i32,
+ #[prost(int32, tag = "5")]
+ pub touch_major: i32,
+ #[prost(int32, tag = "6")]
+ pub touch_minor: i32,
+ #[prost(enumeration = "touch::EventExpiration", tag = "7")]
+ pub expiration: i32,
+ #[prost(int32, tag = "8")]
+ pub orientation: i32,
+ }
+
+ pub mod touch {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum EventExpiration {
+ Unspecified = 0,
+ NeverExpire = 1,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct TouchEvent {
+ #[prost(message, repeated, tag = "1")]
+ pub touches: Vec,
+ #[prost(int32, tag = "2")]
+ pub display: i32,
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct KeyboardEvent {
+ #[prost(enumeration = "keyboard_event::KeyCodeType", tag = "1")]
+ pub code_type: i32,
+ #[prost(enumeration = "keyboard_event::KeyEventType", tag = "2")]
+ pub event_type: i32,
+ #[prost(int32, tag = "3")]
+ pub key_code: i32,
+ #[prost(string, tag = "4")]
+ pub key: String,
+ #[prost(string, tag = "5")]
+ pub text: String,
+ }
+
+ impl KeyboardEvent {
+ pub fn usb_keypress(key_code: i32) -> Self {
+ Self {
+ code_type: keyboard_event::KeyCodeType::Usb as i32,
+ event_type: keyboard_event::KeyEventType::Keypress as i32,
+ key_code,
+ key: String::new(),
+ text: String::new(),
+ }
+ }
+
+ pub fn text(text: String) -> Self {
+ Self {
+ code_type: keyboard_event::KeyCodeType::Usb as i32,
+ event_type: keyboard_event::KeyEventType::Keypress as i32,
+ key_code: 0,
+ key: String::new(),
+ text,
+ }
+ }
+ }
+
+ pub mod keyboard_event {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum KeyCodeType {
+ Usb = 0,
+ Evdev = 1,
+ Xkb = 2,
+ Win = 3,
+ Mac = 4,
+ }
+
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum KeyEventType {
+ Keydown = 0,
+ Keyup = 1,
+ Keypress = 2,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct ImageTransport {
+ #[prost(enumeration = "image_transport::TransportChannel", tag = "1")]
+ pub channel: i32,
+ #[prost(string, tag = "2")]
+ pub handle: String,
+ }
+
+ pub mod image_transport {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum TransportChannel {
+ Unspecified = 0,
+ Mmap = 1,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct ImageFormat {
+ #[prost(enumeration = "image_format::ImgFormat", tag = "1")]
+ pub format: i32,
+ #[prost(uint32, tag = "3")]
+ pub width: u32,
+ #[prost(uint32, tag = "4")]
+ pub height: u32,
+ #[prost(uint32, tag = "5")]
+ pub display: u32,
+ #[prost(message, optional, tag = "6")]
+ pub transport: Option,
+ }
+
+ pub mod image_format {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum ImgFormat {
+ Png = 0,
+ Rgba8888 = 1,
+ Rgb888 = 2,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Image {
+ #[prost(message, optional, tag = "1")]
+ pub format: Option,
+ #[prost(uint32, tag = "2")]
+ pub width: u32,
+ #[prost(uint32, tag = "3")]
+ pub height: u32,
+ #[prost(bytes = "vec", tag = "4")]
+ pub image: Vec,
+ #[prost(uint32, tag = "5")]
+ pub seq: u32,
+ #[prost(uint64, tag = "6")]
+ pub timestamp_us: u64,
+ }
+}
diff --git a/server/src/api/routes.rs b/server/src/api/routes.rs
index 39a969c5..72e30aa0 100644
--- a/server/src/api/routes.rs
+++ b/server/src/api/routes.rs
@@ -1,3 +1,4 @@
+use crate::android::{self, AndroidBridge};
use crate::api::json::json;
use crate::auth;
use crate::config::Config;
@@ -54,6 +55,7 @@ pub struct AppState {
pub inspectors: InspectorHub,
pub metrics: Arc,
pub simulator_inventory: SimulatorInventoryCache,
+ pub android: AndroidBridge,
}
#[derive(Clone, Default)]
@@ -469,6 +471,13 @@ struct AccessibilityPointQuery {
y: f64,
}
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct AndroidFrameQuery {
+ max_edge: Option,
+ max_fps: Option,
+}
+
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct AccessibilityTreeQuery {
@@ -582,6 +591,10 @@ pub fn router(state: AppState) -> Router {
.route("/api/simulators/{udid}/control", get(control_socket))
.route("/api/simulators/{udid}/input", get(control_socket))
.route("/api/simulators/{udid}/h264", get(h264_socket))
+ .route(
+ "/api/simulators/{udid}/android/frames",
+ get(android_frame_socket),
+ )
.route("/api/simulators/{udid}/webrtc/offer", post(webrtc_offer))
.route(
"/api/simulators/{udid}/touch-sequence",
@@ -1336,9 +1349,9 @@ async fn inspector_response(
}
async fn list_simulators(State(state): State) -> Result, AppError> {
- let simulators = list_simulators_cached(state.clone(), false).await?;
+ let simulators = all_device_values(state.clone(), false).await?;
Ok(json(json_value!({
- "simulators": state.registry.enrich_simulators(simulators),
+ "simulators": simulators,
})))
}
@@ -1346,6 +1359,15 @@ async fn boot_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state.clone(), move |android| {
+ android.boot(&action_udid)?;
+ android.wait_until_booted(&action_udid, Duration::from_secs(120))
+ })
+ .await?;
+ return simulator_payload(state, udid).await;
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state.clone(), move |bridge| {
@@ -1359,6 +1381,11 @@ async fn shutdown_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state.clone(), move |android| android.shutdown(&action_udid)).await?;
+ return simulator_payload(state, udid).await;
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state.clone(), move |bridge| {
@@ -1372,6 +1399,11 @@ async fn erase_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| android.erase(&action_udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| bridge.erase_simulator(&action_udid)).await?;
@@ -1396,6 +1428,14 @@ async fn install_app(
"Request body must include `appPath`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.install_app(&action_udid, &payload.app_path)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.install_app(&action_udid, &payload.app_path)
@@ -1414,6 +1454,14 @@ async fn uninstall_app(
"Request body must include `bundleId`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.uninstall_app(&action_udid, &payload.bundle_id)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.uninstall_app(&action_udid, &payload.bundle_id)
@@ -1426,6 +1474,10 @@ async fn get_pasteboard(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let text = run_android_action(state, move |android| android.pasteboard_text(&udid)).await?;
+ return Ok(json(json_value!({ "text": text })));
+ }
let text = run_bridge_action(state, move |bridge| bridge.pasteboard_text(&udid)).await?;
Ok(json(json_value!({ "text": text })))
}
@@ -1435,6 +1487,13 @@ async fn set_pasteboard(
Path(udid): Path,
Json(payload): Json,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.set_pasteboard_text(&udid, &payload.text)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
bridge.set_pasteboard_text(&udid, &payload.text)
})
@@ -1446,7 +1505,11 @@ async fn screenshot_png(
State(state): State,
Path(udid): Path,
) -> Result<(StatusCode, HeaderMap, Vec), AppError> {
- let png = run_bridge_action(state, move |bridge| bridge.screenshot_png(&udid)).await?;
+ let png = if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.screenshot_png(&udid)).await?
+ } else {
+ run_bridge_action(state, move |bridge| bridge.screenshot_png(&udid)).await?
+ };
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
headers.insert(
@@ -1460,6 +1523,10 @@ async fn toggle_appearance(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.toggle_appearance(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| bridge.toggle_appearance(&action_udid)).await?;
Ok(json(json_value!({ "ok": true })))
@@ -1469,6 +1536,9 @@ async fn refresh_stream(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ return Ok(json(json_value!({ "ok": true, "stream": "screenshot" })));
+ }
let session = state.registry.get_or_create_async(&udid).await?;
if let Err(error) = session.ensure_started_async().await {
state.registry.remove(&udid);
@@ -1486,6 +1556,14 @@ async fn open_url(
if payload.url.trim().is_empty() {
return Err(AppError::bad_request("Request body must include `url`."));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.open_url(&action_udid, &payload.url)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.open_url(&action_udid, &payload.url)
@@ -1504,6 +1582,14 @@ async fn launch_bundle(
"Request body must include `bundleId`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.launch_package(&action_udid, &payload.bundle_id)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.launch_bundle(&action_udid, &payload.bundle_id)
@@ -1634,6 +1720,13 @@ async fn send_touch(
let x = payload.x.clamp(0.0, 1.0);
let y = payload.y.clamp(0.0, 1.0);
let phase = payload.phase;
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, &phase)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
input.send_touch(x, y, &phase)
@@ -1664,6 +1757,24 @@ async fn send_touch_sequence(
));
}
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ for event in payload.events {
+ android.send_touch(
+ &udid,
+ event.x.clamp(0.0, 1.0),
+ event.y.clamp(0.0, 1.0),
+ &event.phase,
+ )?;
+ if let Some(delay_ms) = event.delay_ms_after.filter(|delay_ms| *delay_ms > 0) {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for event in payload.events {
@@ -1687,6 +1798,10 @@ async fn control_socket(
Path(udid): Path,
websocket: WebSocketUpgrade,
) -> impl IntoResponse {
+ if android::is_android_id(&udid) {
+ return websocket
+ .on_upgrade(move |socket| handle_android_control_socket(state, udid, socket));
+ }
websocket.on_upgrade(move |socket| handle_control_socket(state, udid, socket))
}
@@ -1699,6 +1814,294 @@ async fn h264_socket(
websocket.on_upgrade(move |socket| handle_h264_socket(state, udid, query, socket))
}
+async fn android_frame_socket(
+ State(state): State,
+ Path(udid): Path,
+ Query(query): Query,
+ websocket: WebSocketUpgrade,
+) -> impl IntoResponse {
+ websocket.on_upgrade(move |socket| {
+ handle_android_frame_socket(state, udid, query.max_edge, query.max_fps, socket)
+ })
+}
+
+async fn handle_android_frame_socket(
+ state: AppState,
+ udid: String,
+ max_edge: Option,
+ max_fps: Option,
+ socket: WebSocket,
+) {
+ let (mut sender, mut receiver) = socket.split();
+ if !android::is_android_id(&udid) {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({
+ "type": "error",
+ "error": "Android frame streaming only supports Android emulator IDs."
+ })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ return;
+ }
+
+ let mut stream = match state.android.grpc_frame_stream(&udid, max_edge).await {
+ Ok(stream) => stream,
+ Err(error) => {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "error", "error": error.to_string() })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ return;
+ }
+ };
+
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "ready", "udid": udid, "platform": "android-emulator" })
+ .to_string()
+ .into(),
+ ))
+ .await;
+
+ let min_frame_gap = max_fps
+ .filter(|fps| *fps > 0)
+ .map(|fps| Duration::from_millis(1000 / u64::from(fps.min(60))))
+ .unwrap_or_else(|| Duration::from_millis(83));
+ let mut last_sent_at = Instant::now() - min_frame_gap;
+
+ loop {
+ tokio::select! {
+ message = receiver.next() => {
+ match message {
+ Some(Ok(Message::Close(_))) | None => break,
+ Some(Ok(_)) => {}
+ Some(Err(_)) => break,
+ }
+ }
+ frame = stream.next_frame() => {
+ let frame = match frame {
+ Ok(Some(frame)) => frame,
+ Ok(None) => break,
+ Err(error) => {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "error", "error": error.to_string() })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ break;
+ }
+ };
+ let now = Instant::now();
+ if now.duration_since(last_sent_at) < min_frame_gap {
+ continue;
+ }
+ last_sent_at = now;
+ if sender
+ .send(Message::Binary(encode_android_frame(frame).into()))
+ .await
+ .is_err()
+ {
+ break;
+ }
+ }
+ }
+ }
+}
+
+fn encode_android_frame(frame: android::AndroidFrame) -> Vec {
+ const HEADER_LEN: usize = 32;
+ let mut bytes = Vec::with_capacity(HEADER_LEN + frame.rgba.len());
+ bytes.extend_from_slice(b"SDAF");
+ bytes.push(1);
+ bytes.push(1);
+ bytes.extend_from_slice(&[0, 0]);
+ bytes.extend_from_slice(&frame.width.to_le_bytes());
+ bytes.extend_from_slice(&frame.height.to_le_bytes());
+ bytes.extend_from_slice(&frame.seq.to_le_bytes());
+ bytes.extend_from_slice(&0u32.to_le_bytes());
+ bytes.extend_from_slice(&frame.timestamp_us.to_le_bytes());
+ bytes.extend_from_slice(&frame.rgba);
+ bytes
+}
+
+async fn handle_android_control_socket(state: AppState, udid: String, socket: WebSocket) {
+ let (mut sender, mut receiver) = socket.split();
+ let mut active_touch: Option = None;
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "ready", "udid": udid, "platform": "android-emulator" })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ while let Some(message) = receiver.next().await {
+ let text = match message {
+ Ok(Message::Text(text)) => text,
+ Ok(Message::Binary(bytes)) => match String::from_utf8(bytes.to_vec()) {
+ Ok(text) => text.into(),
+ Err(_) => continue,
+ },
+ Ok(Message::Close(_)) => break,
+ Ok(Message::Ping(_)) | Ok(Message::Pong(_)) => continue,
+ Err(_) => break,
+ };
+ let Ok(control_message) = serde_json::from_str::(&text) else {
+ continue;
+ };
+ let state = state.clone();
+ let udid = udid.clone();
+ let _ = run_android_control_message(state, udid, control_message, &mut active_touch).await;
+ }
+}
+
/// State of the currently-active touch on an Android control socket,
/// used to decide between a tap and a swipe when the touch ends.
struct AndroidControlTouch {
    // When the "began" phase arrived; bounds the synthesized swipe duration.
    started_at: Instant,
    // Normalized [0, 1] coordinates where the touch started.
    start_x: f64,
    start_y: f64,
    // Most recent normalized coordinates seen during "moved" phases.
    latest_x: f64,
    latest_y: f64,
}
+
+async fn run_android_control_message(
+ state: AppState,
+ udid: String,
+ message: ControlMessage,
+ active_touch: &mut Option,
+) -> Result<(), AppError> {
+ match message {
+ ControlMessage::Touch { x, y, phase } => {
+ handle_android_control_touch(state, udid, x, y, phase, active_touch).await
+ }
+ ControlMessage::EdgeTouch { x, y, phase, .. } => {
+ handle_android_control_touch(state, udid, x, y, phase, active_touch).await
+ }
+ ControlMessage::MultiTouch { x1, y1, phase, .. } => {
+ handle_android_control_touch(state, udid, x1, y1, phase, active_touch).await
+ }
+ other => {
+ run_android_action(state, move |android| match other {
+ ControlMessage::Key {
+ key_code,
+ modifiers,
+ } => android.send_key(&udid, key_code, modifiers.unwrap_or(0)),
+ ControlMessage::Button {
+ button,
+ duration_ms,
+ phase,
+ ..
+ } => match phase.as_deref() {
+ Some("down" | "began") => Ok(()),
+ Some("up" | "ended" | "cancelled") | None => {
+ android.press_button(&udid, &button, duration_ms.unwrap_or(0))
+ }
+ Some(_) => Err(AppError::bad_request(
+ "`phase` must be `down`, `up`, `began`, `ended`, or `cancelled`.",
+ )),
+ },
+ ControlMessage::DismissKeyboard => android.send_key(&udid, 41, 0),
+ ControlMessage::Home => android.press_home(&udid),
+ ControlMessage::AppSwitcher => android.open_app_switcher(&udid),
+ ControlMessage::RotateLeft | ControlMessage::RotateRight => {
+ android.rotate_right(&udid)
+ }
+ ControlMessage::ToggleAppearance => android.toggle_appearance(&udid),
+ ControlMessage::Touch { .. }
+ | ControlMessage::EdgeTouch { .. }
+ | ControlMessage::MultiTouch { .. } => Ok(()),
+ })
+ .await
+ }
+ }
+}
+
+async fn handle_android_control_touch(
+ state: AppState,
+ udid: String,
+ x: f64,
+ y: f64,
+ phase: String,
+ active_touch: &mut Option,
+) -> Result<(), AppError> {
+ if !x.is_finite() || !y.is_finite() {
+ return Err(AppError::bad_request(
+ "`x` and `y` must be finite normalized numbers.",
+ ));
+ }
+ let x = x.clamp(0.0, 1.0);
+ let y = y.clamp(0.0, 1.0);
+ match phase.as_str() {
+ "began" => {
+ *active_touch = Some(AndroidControlTouch {
+ started_at: Instant::now(),
+ start_x: x,
+ start_y: y,
+ latest_x: x,
+ latest_y: y,
+ });
+ run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, "began")
+ })
+ .await
+ }
+ "moved" => {
+ if let Some(touch) = active_touch.as_mut() {
+ touch.latest_x = x;
+ touch.latest_y = y;
+ }
+ run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, "moved")
+ })
+ .await
+ }
+ "ended" => {
+ let touch = active_touch.take().unwrap_or(AndroidControlTouch {
+ started_at: Instant::now(),
+ start_x: x,
+ start_y: y,
+ latest_x: x,
+ latest_y: y,
+ });
+ let end_x = x;
+ let end_y = y;
+ let distance =
+ ((end_x - touch.start_x).powi(2) + (end_y - touch.start_y).powi(2)).sqrt();
+ let duration_ms = touch.started_at.elapsed().as_millis().clamp(80, 1500) as u64;
+ run_android_action(state, move |android| {
+ if distance >= 0.025 {
+ android
+ .send_touch(&udid, end_x, end_y, "ended")
+ .or_else(|_| {
+ android.send_swipe(
+ &udid,
+ touch.start_x,
+ touch.start_y,
+ end_x,
+ end_y,
+ duration_ms,
+ )
+ })
+ } else {
+ android.send_touch(&udid, end_x, end_y, "ended")
+ }
+ })
+ .await
+ }
+ "cancelled" => {
+ *active_touch = None;
+ Ok(())
+ }
+ _ => Ok(()),
+ }
+}
+
async fn webrtc_offer(
State(state): State,
Path(udid): Path,
@@ -2145,6 +2548,13 @@ async fn send_key(
Path(udid): Path,
Json(payload): Json,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_key(&udid, payload.key_code, payload.modifiers.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
bridge.send_key(&udid, payload.key_code, payload.modifiers.unwrap_or(0))
})
@@ -2167,6 +2577,21 @@ async fn send_key_sequence(
"Key sequence cannot contain more than 512 key codes.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let delay_ms = payload.delay_ms.unwrap_or(0);
+ let key_count = payload.key_codes.len();
+ for (index, key_code) in payload.key_codes.into_iter().enumerate() {
+ android.send_key(&udid, key_code, 0)?;
+ if delay_ms > 0 && index + 1 < key_count {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let delay_ms = payload.delay_ms.unwrap_or(0);
@@ -2187,6 +2612,10 @@ async fn dismiss_keyboard(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.send_key(&udid, 41, 0)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.send_key(&udid, 41, 0)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -2199,6 +2628,13 @@ async fn press_button(
if payload.button.trim().is_empty() {
return Err(AppError::bad_request("Request body must include `button`."));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.press_button(&udid, &payload.button, payload.duration_ms.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
if let Some(phase) = payload.phase.as_deref() {
let pressed = match phase {
"down" | "began" => true,
@@ -2232,6 +2668,10 @@ async fn press_home(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.press_home(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.press_home(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -2240,6 +2680,10 @@ async fn open_app_switcher(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_app_switcher(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.open_app_switcher(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -2248,6 +2692,10 @@ async fn rotate_right(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_right(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -2256,6 +2704,10 @@ async fn rotate_left(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_left(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -2264,6 +2716,11 @@ async fn chrome_profile(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ let profile =
+ run_android_action(state, move |android| android.chrome_profile(&udid)).await?;
+ return Ok(json(profile));
+ }
let profile = run_bridge_action(state, move |bridge| bridge.chrome_profile(&udid)).await?;
Ok(json(json_value!(profile)))
}
@@ -2273,6 +2730,11 @@ async fn chrome_png(
Path(udid): Path<String>,
Query(query): Query,
) -> Result<(StatusCode, HeaderMap, Vec<u8>), AppError> {
+ if android::is_android_id(&udid) {
+ return Err(AppError::not_found(
+ "Android emulators do not expose device chrome assets.",
+ ));
+ }
let include_buttons = query
.buttons
.as_deref()
@@ -2344,6 +2806,11 @@ async fn screen_mask_png(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<(StatusCode, HeaderMap, Vec<u8>), AppError> {
+ if android::is_android_id(&udid) {
+ return Err(AppError::not_found(
+ "Android emulators do not expose screen mask assets.",
+ ));
+ }
let png = run_bridge_action(state, move |bridge| bridge.screen_mask_png(&udid)).await?;
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
@@ -2378,6 +2845,22 @@ async fn accessibility_tree_value(
max_depth: Option,
include_hidden: bool,
) -> Result {
+ if android::is_android_id(&udid) {
+ let requested_source = source
+ .filter(|source| *source != "auto")
+ .map(|source| source.to_owned());
+ return run_android_action(state, move |android| {
+ let mut tree = android.accessibility_tree(&udid, max_depth)?;
+ if include_hidden {
+ tree["includeHidden"] = Value::Bool(true);
+ }
+ if let Some(source) = requested_source {
+ tree["requestedSource"] = Value::String(source);
+ }
+ Ok(tree)
+ })
+ .await;
+ }
let requested_source = AccessibilityHierarchySource::parse(source)?;
let max_depth = max_depth.map(|depth| depth.min(80));
@@ -2516,6 +2999,13 @@ async fn accessibility_point(
));
}
+ if android::is_android_id(&udid) {
+ let snapshot = run_android_action(state, move |android| {
+ android.accessibility_tree(&udid, None)
+ })
+ .await?;
+ return Ok(json(snapshot));
+ }
let snapshot = accessibility_snapshot(state, udid, Some((query.x, query.y)), None).await?;
Ok(json(snapshot))
}
@@ -2564,6 +3054,17 @@ async fn perform_tap_payload(
tap_point_from_snapshot(&snapshot, &payload.selector)?
};
+ if android::is_android_id(&udid) {
+ return run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, "began")?;
+ if duration_ms > 0 {
+ std::thread::sleep(Duration::from_millis(duration_ms));
+ }
+ android.send_touch(&udid, x, y, "ended")
+ })
+ .await;
+ }
+
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
input.send_touch(x, y, "began")?;
@@ -2646,6 +3147,13 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
key_code,
modifiers,
} => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_key(&udid, key_code, modifiers.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json_value!({ "action": "key" }));
+ }
run_bridge_action(state, move |bridge| {
bridge.send_key(&udid, key_code, modifiers.unwrap_or(0))
})
@@ -2664,6 +3172,21 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"keySequence cannot contain more than 512 key codes.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let delay_ms = delay_ms.unwrap_or(0);
+ let key_count = key_codes.len();
+ for (index, key_code) in key_codes.into_iter().enumerate() {
+ android.send_key(&udid, key_code, 0)?;
+ if delay_ms > 0 && index + 1 < key_count {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "keySequence" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let delay_ms = delay_ms.unwrap_or(0);
@@ -2692,6 +3215,28 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"touch requires finite normalized x and y.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let x = x.clamp(0.0, 1.0);
+ let y = y.clamp(0.0, 1.0);
+ if down.unwrap_or(false) || up.unwrap_or(false) {
+ if down.unwrap_or(false) {
+ android.send_touch(&udid, x, y, "began")?;
+ }
+ if down.unwrap_or(false) && up.unwrap_or(false) {
+ std::thread::sleep(Duration::from_millis(delay_ms.unwrap_or(100)));
+ }
+ if up.unwrap_or(false) {
+ android.send_touch(&udid, x, y, "ended")?;
+ }
+ } else {
+ android.send_touch(&udid, x, y, phase.as_deref().unwrap_or("began"))?;
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "touch" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let x = x.clamp(0.0, 1.0);
@@ -2723,6 +3268,31 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"touchSequence cannot contain more than 64 events.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ for event in events {
+ if !event.x.is_finite() || !event.y.is_finite() {
+ return Err(AppError::bad_request(
+ "touchSequence requires finite normalized x and y.",
+ ));
+ }
+ android.send_touch(
+ &udid,
+ event.x.clamp(0.0, 1.0),
+ event.y.clamp(0.0, 1.0),
+ &event.phase,
+ )?;
+ if let Some(delay_ms) =
+ event.delay_ms_after.filter(|delay_ms| *delay_ms > 0)
+ {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "touchSequence" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for event in events {
@@ -2762,6 +3332,20 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"swipe requires finite normalized coordinates.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_swipe(
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms.unwrap_or(350),
+ )
+ })
+ .await?;
+ return Ok(json_value!({ "action": "swipe" }));
+ }
run_bridge_action(state, move |bridge| {
let step_count = steps.unwrap_or(12).max(1);
let delay =
@@ -2794,6 +3378,20 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
} => {
let (start_x, start_y, end_x, end_y, default_duration_ms) =
normalized_gesture_coordinates(&preset, delta)?;
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_swipe(
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms.unwrap_or(default_duration_ms),
+ )
+ })
+ .await?;
+ return Ok(json_value!({ "action": "gesture", "preset": preset }));
+ }
run_bridge_action(state, move |bridge| {
let step_count = steps.unwrap_or(12).max(1);
let delay = Duration::from_millis(
@@ -2816,6 +3414,23 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
Ok(json_value!({ "action": "gesture", "preset": preset }))
}
BatchStep::Type { text, delay_ms } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ if delay_ms.is_some() {
+ for character in text.chars() {
+ android.type_text(&udid, &character.to_string())?;
+ if let Some(delay_ms) = delay_ms.filter(|delay_ms| *delay_ms > 0) {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ } else {
+ android.type_text(&udid, &text)
+ }
+ })
+ .await?;
+ return Ok(json_value!({ "action": "type" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for character in text.chars() {
@@ -2838,6 +3453,13 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
button,
duration_ms,
} => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.press_button(&udid, &button, duration_ms.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json_value!({ "action": "button" }));
+ }
run_bridge_action(state, move |bridge| {
bridge.press_button(&udid, &button, duration_ms.unwrap_or(0))
})
@@ -2845,34 +3467,69 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
Ok(json_value!({ "action": "button" }))
}
BatchStep::Launch { bundle_id } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.launch_package(&udid, &bundle_id)
+ })
+ .await?;
+ return Ok(json_value!({ "action": "launch" }));
+ }
run_bridge_action(state, move |bridge| bridge.launch_bundle(&udid, &bundle_id)).await?;
Ok(json_value!({ "action": "launch" }))
}
BatchStep::OpenUrl { url } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_url(&udid, &url)).await?;
+ return Ok(json_value!({ "action": "openUrl" }));
+ }
run_bridge_action(state, move |bridge| bridge.open_url(&udid, &url)).await?;
Ok(json_value!({ "action": "openUrl" }))
}
BatchStep::Home => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.press_home(&udid)).await?;
+ return Ok(json_value!({ "action": "home" }));
+ }
run_bridge_action(state, move |bridge| bridge.press_home(&udid)).await?;
Ok(json_value!({ "action": "home" }))
}
BatchStep::DismissKeyboard => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.send_key(&udid, 41, 0)).await?;
+ return Ok(json_value!({ "action": "dismissKeyboard" }));
+ }
run_bridge_action(state, move |bridge| bridge.send_key(&udid, 41, 0)).await?;
Ok(json_value!({ "action": "dismissKeyboard" }))
}
BatchStep::AppSwitcher => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_app_switcher(&udid)).await?;
+ return Ok(json_value!({ "action": "appSwitcher" }));
+ }
run_bridge_action(state, move |bridge| bridge.open_app_switcher(&udid)).await?;
Ok(json_value!({ "action": "appSwitcher" }))
}
BatchStep::RotateLeft => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json_value!({ "action": "rotateLeft" }));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_left(&udid)).await?;
Ok(json_value!({ "action": "rotateLeft" }))
}
BatchStep::RotateRight => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json_value!({ "action": "rotateRight" }));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_right(&udid)).await?;
Ok(json_value!({ "action": "rotateRight" }))
}
BatchStep::ToggleAppearance => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.toggle_appearance(&udid)).await?;
+ return Ok(json_value!({ "action": "toggleAppearance" }));
+ }
run_bridge_action(state, move |bridge| bridge.toggle_appearance(&udid)).await?;
Ok(json_value!({ "action": "toggleAppearance" }))
}
@@ -3309,6 +3966,10 @@ async fn simulator_logs(
Query(query): Query,
) -> Result<Json<Value>, AppError> {
let limit = query.limit.unwrap_or(250).clamp(1, 1000);
+ if android::is_android_id(&udid) {
+ let entries = run_android_action(state, move |android| android.logs(&udid, limit)).await?;
+ return Ok(json(json_value!({ "entries": entries })));
+ }
let filters = LogFilters::new(
split_filter_values(query.levels.as_deref()),
split_filter_values(query.processes.as_deref()),
@@ -4643,6 +5304,28 @@ where
})?
}
+async fn run_android_action<F, T>(state: AppState, action: F) -> Result<T, AppError>
+where
+ F: FnOnce(AndroidBridge) -> Result<T, AppError> + Send + 'static,
+ T: Send + 'static,
+{
+ let android = state.android.clone();
+ task::spawn_blocking(move || action(android))
+ .await
+ .map_err(|error| {
+ AppError::internal(format!("Failed to join Android bridge task: {error}"))
+ })?
+}
+
+async fn all_device_values(state: AppState, force_refresh: bool) -> Result<Vec<Value>, AppError> {
+ let ios = list_simulators_cached(state.clone(), force_refresh).await?;
+ let mut values = state.registry.enrich_simulators(ios);
+ let android_devices =
+ run_android_action(state.clone(), |android| android.list_devices()).await?;
+ values.extend(state.android.enrich_devices(android_devices));
+ Ok(values)
+}
+
async fn list_simulators_cached(
state: AppState,
force_refresh: bool,
@@ -4675,8 +5358,7 @@ async fn accessibility_snapshot(
}
async fn simulator_payload(state: AppState, udid: String) -> Result<Json<Value>, AppError> {
- let simulators = list_simulators_cached(state.clone(), true).await?;
- let enriched = state.registry.enrich_simulators(simulators);
+ let enriched = all_device_values(state.clone(), true).await?;
let simulator = enriched
.into_iter()
.find(|entry| entry.get("udid").and_then(Value::as_str) == Some(udid.as_str()))
diff --git a/server/src/main.rs b/server/src/main.rs
index 6b0f6b99..8ddb0e87 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -1,3 +1,4 @@
+mod android;
mod api;
mod auth;
mod config;
@@ -2245,7 +2246,16 @@ fn main() -> anyhow::Result<()> {
up,
delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ if android_device && !normalized {
+ anyhow::bail!("Android touch coordinates require --normalized.");
+ }
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
if down || up {
let mut events = Vec::new();
if down {
@@ -2300,8 +2310,13 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
if let (Some(server_url), Some(x), Some(y), true, None, None, None, None) = (
- service_url.as_deref(),
+ command_server_url.as_deref(),
x,
y,
normalized,
@@ -2313,7 +2328,7 @@ fn main() -> anyhow::Result<()> {
sleep_ms(pre_delay_ms);
service_tap(server_url, &udid, x, y, duration_ms)?;
sleep_ms(post_delay_ms);
- } else if let Some(server_url) = service_url.as_deref() {
+ } else if let Some(server_url) = command_server_url.as_deref() {
sleep_ms(pre_delay_ms);
service_tap_element(
server_url,
@@ -2375,18 +2390,44 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ if android_device && !normalized {
+ anyhow::bail!("Android swipe coordinates require --normalized.");
+ }
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
sleep_ms(pre_delay_ms);
- service_swipe(
- server_url,
- &udid,
- start_x,
- start_y,
- end_x,
- end_y,
- duration_ms,
- steps,
- )?;
+ if android_device {
+ service_batch(
+ server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "swipe",
+ "startX": start_x,
+ "startY": start_y,
+ "endX": end_x,
+ "endY": end_y,
+ "durationMs": duration_ms,
+ "steps": steps,
+ })],
+ false,
+ )?;
+ } else {
+ service_swipe(
+ server_url,
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms,
+ steps,
+ )?;
+ }
sleep_ms(post_delay_ms);
} else {
let (start_x, start_y) =
@@ -2421,7 +2462,36 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if android_device {
+ let server_url = command_server_url
+ .as_deref()
+ .ok_or_else(|| anyhow::anyhow!("Android command requires SimDeck daemon."))?;
+ sleep_ms(pre_delay_ms);
+ service_batch(
+ server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "gesture",
+ "preset": preset,
+ "durationMs": duration_ms,
+ "delta": delta,
+ "steps": 4,
+ })],
+ false,
+ )?;
+ sleep_ms(post_delay_ms);
+ println_json(
+ &serde_json::json!({ "ok": true, "udid": udid, "action": "gesture", "preset": preset }),
+ )?;
+ return Ok(());
+ }
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
let gesture = gesture_coordinates(
&bridge,
&udid,
@@ -2484,6 +2554,9 @@ fn main() -> anyhow::Result<()> {
duration_ms,
steps,
} => {
+ if android::is_android_id(&udid) {
+ anyhow::bail!("Android pinch gestures are not supported by the ADB input bridge.");
+ }
let frames = pinch_frames(
&bridge,
&udid,
@@ -2509,6 +2582,9 @@ fn main() -> anyhow::Result<()> {
duration_ms,
steps,
} => {
+ if android::is_android_id(&udid) {
+ anyhow::bail!("Android rotate gestures are not supported by the ADB input bridge.");
+ }
let frames = rotate_gesture_frames(
&bridge,
&udid,
@@ -2537,7 +2613,12 @@ fn main() -> anyhow::Result<()> {
} => {
let key_code = parse_hid_key(&key)?;
sleep_ms(pre_delay_ms);
- if let Some(server_url) = service_url.as_deref().filter(|_| duration_ms == 0) {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| duration_ms == 0) {
service_key(server_url, &udid, key_code, modifiers)?;
} else if duration_ms > 0 && modifiers == 0 {
let input = bridge.create_input_session(&udid)?;
@@ -2557,7 +2638,12 @@ fn main() -> anyhow::Result<()> {
delay_ms,
} => {
let keys = parse_key_list(&keycodes)?;
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_key_sequence(server_url, &udid, &keys, delay_ms)?;
} else {
let input = bridge.create_input_session(&udid)?;
@@ -2580,7 +2666,12 @@ fn main() -> anyhow::Result<()> {
} => {
let modifier_mask = parse_modifier_mask(&modifiers)?;
let key_code = parse_hid_key(&key)?;
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_key(server_url, &udid, key_code, modifier_mask)?;
} else {
bridge.send_key(&udid, key_code, modifier_mask)?;
@@ -2596,7 +2687,21 @@ fn main() -> anyhow::Result<()> {
delay_ms,
} => {
let text = read_text_input(text, stdin, file)?;
- type_text(&bridge, &udid, &text, delay_ms)?;
+ if android::is_android_id(&udid) {
+ let server_url = command_service_url(explicit_server_url.clone())?;
+ service_batch(
+ &server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "type",
+ "text": text,
+ "delayMs": delay_ms,
+ })],
+ false,
+ )?;
+ } else {
+ type_text(&bridge, &udid, &text, delay_ms)?;
+ }
println_json(&serde_json::json!({ "ok": true, "udid": udid, "action": "type" }))?;
Ok(())
}
@@ -2605,7 +2710,12 @@ fn main() -> anyhow::Result<()> {
button,
duration_ms,
} => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_button(server_url, &udid, &button, duration_ms)?;
} else {
bridge.press_button(&udid, &button, duration_ms)?;
@@ -2622,7 +2732,12 @@ fn main() -> anyhow::Result<()> {
stdin,
continue_on_error,
} => {
- let report = if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ let report = if let Some(server_url) = command_server_url.as_deref() {
let step_lines = read_batch_steps(steps, file, stdin)?;
service_batch(
server_url,
@@ -2637,7 +2752,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::DismissKeyboard { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "dismiss-keyboard", &Value::Null)?;
} else {
bridge.send_key(&udid, 41, 0)?;
@@ -2651,7 +2771,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::Home { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "home", &Value::Null)?;
} else {
bridge.press_home(&udid)?;
@@ -2660,7 +2785,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::AppSwitcher { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "app-switcher", &Value::Null)?;
} else {
bridge.open_app_switcher(&udid)?;
@@ -2671,7 +2801,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::RotateLeft { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "rotate-left", &Value::Null)?;
} else {
bridge.rotate_left(&udid)?;
@@ -2682,7 +2817,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::RotateRight { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "rotate-right", &Value::Null)?;
} else {
bridge.rotate_right(&udid)?;
@@ -5179,6 +5319,7 @@ async fn serve(
inspectors,
metrics,
simulator_inventory: Default::default(),
+ android: Default::default(),
};
let http_router = app_router(
diff --git a/server/src/native/ffi.rs b/server/src/native/ffi.rs
index 2d0659fb..9d6b2df1 100644
--- a/server/src/native/ffi.rs
+++ b/server/src/native/ffi.rs
@@ -268,6 +268,23 @@ unsafe extern "C" {
error_message: *mut *mut c_char,
) -> bool;
+ pub fn xcw_native_h264_encoder_create(
+ callback: Option,
+ user_data: *mut c_void,
+ error_message: *mut *mut c_char,
+ ) -> *mut c_void;
+ pub fn xcw_native_h264_encoder_destroy(handle: *mut c_void);
+ pub fn xcw_native_h264_encoder_encode_rgba(
+ handle: *mut c_void,
+ rgba: *const u8,
+ length: usize,
+ width: u32,
+ height: u32,
+ timestamp_us: u64,
+ error_message: *mut *mut c_char,
+ ) -> bool;
+ pub fn xcw_native_h264_encoder_request_keyframe(handle: *mut c_void);
+
pub fn xcw_native_free_string(value: *mut c_char);
pub fn xcw_native_free_bytes(bytes: xcw_native_owned_bytes);
pub fn xcw_native_release_shared_bytes(bytes: xcw_native_shared_bytes);
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index e9caff8b..ccae059b 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -1,18 +1,22 @@
+use crate::android;
use crate::api::routes::{
apply_stream_quality_payload, run_control_message, run_toggle_appearance_control, AppState,
ControlMessage, StreamQualityPayload,
};
use crate::error::AppError;
use crate::metrics::counters::ClientStreamStats;
+use crate::native::ffi;
+use crate::transport::packet::{FramePacket, SharedFrame};
use bytes::Bytes;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, VecDeque};
-use std::sync::atomic::Ordering;
-use std::sync::{Arc, Mutex, OnceLock};
+use std::ffi::{c_void, CStr};
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::{Arc, Mutex, OnceLock, RwLock, Weak};
use std::time::Duration;
use tokio::sync::{broadcast, mpsc};
use tokio::task;
-use tokio::time;
+use tokio::time::{self, Instant};
use tracing::{info, warn};
use webrtc::api::interceptor_registry::register_default_interceptors;
use webrtc::api::media_engine::{MediaEngine, MIME_TYPE_H264};
@@ -53,6 +57,10 @@ const WEBRTC_FAST_ICE_GATHER_TIMEOUT: Duration = Duration::from_millis(250);
const WEBRTC_FULL_ICE_GATHER_TIMEOUT: Duration = Duration::from_secs(3);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
+const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
+const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 1280;
+const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
+const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
OnceLock::new();
const MAX_WEBRTC_MEDIA_STREAMS_PER_UDID: usize = 16;
@@ -103,12 +111,6 @@ pub async fn create_answer(
"WebRTC payload must include type `offer`.",
));
}
-
- let session = state.registry.get_or_create_async(&udid).await?;
- if let Err(error) = session.ensure_started_async().await {
- state.registry.remove(&udid);
- return Err(error);
- }
if payload.transport.is_some() {
return Err(AppError::bad_request(
"WebRTC preview supports media tracks only.",
@@ -117,6 +119,26 @@ pub async fn create_answer(
if let Some(stream_config) = payload.stream_config.as_ref() {
apply_stream_quality_payload(&state, stream_config)?;
}
+
+ let source = if android::is_android_id(&udid) {
+ WebRtcVideoSource::Android(
+ AndroidWebRtcSource::start(
+ state.android.clone(),
+ state.metrics.clone(),
+ udid.clone(),
+ android_webrtc_max_edge(),
+ )
+ .await?,
+ )
+ } else {
+ let session = state.registry.get_or_create_async(&udid).await?;
+ if let Err(error) = session.ensure_started_async().await {
+ state.registry.remove(&udid);
+ return Err(error);
+ }
+ WebRtcVideoSource::Simulator(session)
+ };
+
info!(
"WebRTC offer for {udid}: remote_candidates={} remote_candidate_types={} ice_servers={} ice_transport_policy={}",
count_sdp_candidates(&payload.sdp),
@@ -128,9 +150,9 @@ pub async fn create_answer(
ice_transport_policy_label()
);
- let first_frame = wait_for_h264_sync_keyframe(&session, WEBRTC_INITIAL_KEYFRAME_TIMEOUT)
+ let first_frame = wait_for_h264_sync_keyframe(&source, WEBRTC_INITIAL_KEYFRAME_TIMEOUT)
.await
- .ok_or_else(|| AppError::native("Timed out waiting for a simulator H.264 keyframe."))?;
+ .ok_or_else(|| AppError::native("Timed out waiting for a device H.264 keyframe."))?;
let codec = first_frame
.codec
.as_deref()
@@ -179,13 +201,22 @@ pub async fn create_answer(
);
register_diagnostics(&peer_connection, &udid);
let (stream_control_tx, stream_control_rx) = mpsc::unbounded_channel();
- register_control_data_channel(
- &peer_connection,
- session.clone(),
- state.clone(),
- udid.clone(),
- stream_control_tx,
- );
+ match &source {
+ WebRtcVideoSource::Simulator(session) => register_control_data_channel(
+ &peer_connection,
+ session.clone(),
+ state.clone(),
+ udid.clone(),
+ stream_control_tx,
+ ),
+ WebRtcVideoSource::Android(source) => register_android_data_channel(
+ &peer_connection,
+ source.clone(),
+ state.clone(),
+ udid.clone(),
+ stream_control_tx,
+ ),
+ }
let video_track = Arc::new(TrackLocalStaticRTP::new(
RTCRtpCodecCapability {
@@ -203,7 +234,7 @@ pub async fn create_answer(
.add_track(video_track.clone() as Arc<dyn TrackLocal + Send + Sync>)
.await
.map_err(|error| AppError::internal(format!("add WebRTC video track: {error}")))?;
- let rtcp_session = session.clone();
+ let rtcp_source = source.clone();
let rtcp_udid = udid.clone();
tokio::spawn(async move {
while let Ok((packets, _attributes)) = rtp_sender.read_rtcp().await {
@@ -212,7 +243,7 @@ pub async fn create_answer(
.any(|packet| rtcp_packet_requests_keyframe(packet.as_ref()))
{
info!("WebRTC RTCP requested keyframe for {rtcp_udid}");
- rtcp_session.request_keyframe();
+ rtcp_source.request_keyframe();
}
}
});
@@ -264,7 +295,7 @@ pub async fn create_answer(
WebRtcMediaStream {
state,
udid,
- session,
+ source,
first_frame,
peer_connection,
video_track,
@@ -481,6 +512,202 @@ fn attach_control_data_channel(
}));
}
+fn register_android_data_channel(
+ peer_connection: &Arc,
+ source: AndroidWebRtcSource,
+ state: AppState,
+ udid: String,
+ stream_control_tx: mpsc::UnboundedSender,
+) {
+ peer_connection.on_data_channel(Box::new(move |channel: Arc| {
+ let source = source.clone();
+ let state = state.clone();
+ let udid = udid.clone();
+ let stream_control_tx = stream_control_tx.clone();
+ Box::pin(async move {
+ let label = channel.label();
+ if label != WEBRTC_CONTROL_CHANNEL_LABEL && label != WEBRTC_TELEMETRY_CHANNEL_LABEL {
+ return;
+ }
+ attach_android_data_channel(channel, source, state, udid, stream_control_tx);
+ })
+ }));
+}
+
+fn attach_android_data_channel(
+ channel: Arc,
+ source: AndroidWebRtcSource,
+ state: AppState,
+ udid: String,
+ stream_control_tx: mpsc::UnboundedSender,
+) {
+ let (control_tx, control_rx) = mpsc::unbounded_channel::();
+ task::spawn(run_android_webrtc_control_queue(
+ state.clone(),
+ udid.clone(),
+ control_rx,
+ ));
+ channel.on_message(Box::new(move |message: DataChannelMessage| {
+ let source = source.clone();
+ let state = state.clone();
+ let udid = udid.clone();
+ let stream_control_tx = stream_control_tx.clone();
+ let control_tx = control_tx.clone();
+ Box::pin(async move {
+ let Ok(text) = std::str::from_utf8(&message.data) else {
+ warn!("Invalid Android WebRTC control message bytes for {udid}");
+ return;
+ };
+ if let Ok(message) = serde_json::from_str::(text) {
+ match message {
+ WebRtcDataChannelMessage::ClientStats { stats } => {
+ if !stats.client_id.trim().is_empty() && !stats.kind.trim().is_empty() {
+ state.metrics.record_client_stream_stats(*stats);
+ }
+ }
+ WebRtcDataChannelMessage::StreamControl {
+ force_keyframe,
+ snapshot,
+ } => {
+ let command = WebRtcStreamCommand {
+ force_keyframe: force_keyframe.unwrap_or(false),
+ snapshot: snapshot.unwrap_or(false),
+ };
+ if command.force_keyframe || command.snapshot {
+ source.request_keyframe();
+ }
+ let _ = stream_control_tx.send(command);
+ }
+ WebRtcDataChannelMessage::StreamQuality { config } => {
+ if let Err(error) = apply_stream_quality_payload(&state, &config) {
+ warn!(
+ "Android WebRTC stream quality update failed for {udid}: {error}"
+ );
+ } else {
+ source.request_keyframe();
+ }
+ }
+ }
+ return;
+ }
+
+ let control_message = match serde_json::from_str::(text) {
+ Ok(message) => message,
+ Err(error) => {
+ warn!("Invalid Android WebRTC control message for {udid}: {error}");
+ return;
+ }
+ };
+ if control_tx.send(control_message).is_err() {
+ warn!("Android WebRTC control queue closed for {udid}");
+ }
+ })
+ }));
+}
+
+async fn run_android_webrtc_control_queue(
+ state: AppState,
+ udid: String,
+ mut receiver: mpsc::UnboundedReceiver,
+) {
+ let mut pending = VecDeque::new();
+ loop {
+ let mut message = match pending.pop_front() {
+ Some(message) => message,
+ None => match receiver.recv().await {
+ Some(message) => message,
+ None => break,
+ },
+ };
+ if webrtc_control_message_is_move(&message) {
+ while let Ok(next_message) = receiver.try_recv() {
+ if webrtc_control_message_is_move(&next_message) {
+ message = next_message;
+ } else {
+ pending.push_back(next_message);
+ break;
+ }
+ }
+ }
+
+ if let Err(error) =
+ run_android_webrtc_control_message(state.clone(), udid.clone(), message).await
+ {
+ warn!("Android WebRTC control message failed for {udid}: {error}");
+ }
+ }
+}
+
+async fn run_android_webrtc_control_message(
+ state: AppState,
+ udid: String,
+ message: ControlMessage,
+) -> Result<(), AppError> {
+ task::spawn_blocking(move || match message {
+ ControlMessage::Touch { x, y, phase } => {
+ if !x.is_finite() || !y.is_finite() {
+ return Err(AppError::bad_request(
+ "`x` and `y` must be finite normalized numbers.",
+ ));
+ }
+ state
+ .android
+ .send_touch(&udid, x.clamp(0.0, 1.0), y.clamp(0.0, 1.0), &phase)
+ }
+ ControlMessage::EdgeTouch { x, y, phase, .. } => {
+ if !x.is_finite() || !y.is_finite() {
+ return Err(AppError::bad_request(
+ "`x` and `y` must be finite normalized numbers.",
+ ));
+ }
+ state
+ .android
+ .send_touch(&udid, x.clamp(0.0, 1.0), y.clamp(0.0, 1.0), &phase)
+ }
+ ControlMessage::MultiTouch { x1, y1, phase, .. } => {
+ if !x1.is_finite() || !y1.is_finite() {
+ return Err(AppError::bad_request(
+ "`x1` and `y1` must be finite normalized numbers.",
+ ));
+ }
+ state
+ .android
+ .send_touch(&udid, x1.clamp(0.0, 1.0), y1.clamp(0.0, 1.0), &phase)
+ }
+ ControlMessage::Key {
+ key_code,
+ modifiers,
+ } => state
+ .android
+ .send_key(&udid, key_code, modifiers.unwrap_or(0)),
+ ControlMessage::Button {
+ button,
+ duration_ms,
+ phase,
+ ..
+ } => match phase.as_deref() {
+ Some("down" | "began") => Ok(()),
+ Some("up" | "ended" | "cancelled") | None => {
+ state
+ .android
+ .press_button(&udid, &button, duration_ms.unwrap_or(0))
+ }
+ Some(_) => Err(AppError::bad_request(
+ "`phase` must be `down`, `up`, `began`, `ended`, or `cancelled`.",
+ )),
+ },
+ ControlMessage::DismissKeyboard => state.android.send_key(&udid, 41, 0),
+ ControlMessage::Home => state.android.press_home(&udid),
+ ControlMessage::AppSwitcher => state.android.open_app_switcher(&udid),
+ ControlMessage::RotateLeft | ControlMessage::RotateRight => {
+ state.android.rotate_right(&udid)
+ }
+ ControlMessage::ToggleAppearance => state.android.toggle_appearance(&udid),
+ })
+ .await
+ .map_err(|error| AppError::internal(format!("Failed to join Android control task: {error}")))?
+}
+
async fn run_webrtc_control_queue(
session: crate::simulators::session::SimulatorSession,
state: AppState,
@@ -842,32 +1069,383 @@ fn ice_transport_policy() -> RTCIceTransportPolicy {
}
}
-async fn wait_for_h264_sync_keyframe(
- session: &crate::simulators::session::SimulatorSession,
- timeout_duration: Duration,
-) -> Option {
- if let Some(frame) = session.latest_keyframe() {
- if h264_frame_is_decoder_sync(&frame) {
- return Some(frame);
+#[derive(Clone)]
+struct AndroidWebRtcSource {
+ inner: Arc,
+}
+
+struct AndroidWebRtcSourceInner {
+ udid: String,
+ encoder_handle: AtomicUsize,
+ callback_user_data: AtomicUsize,
+ shutdown_tx: broadcast::Sender<()>,
+ sender: broadcast::Sender,
+ latest_keyframe: RwLock>,
+ metrics: Arc,
+}
+
+unsafe impl Send for AndroidWebRtcSourceInner {}
+unsafe impl Sync for AndroidWebRtcSourceInner {}
+
+impl AndroidWebRtcSource {
+ async fn start(
+ bridge: android::AndroidBridge,
+ metrics: Arc,
+ udid: String,
+ max_edge: u32,
+ ) -> Result {
+ let mut frame_stream = bridge.grpc_frame_stream(&udid, Some(max_edge)).await?;
+ let (sender, _) = broadcast::channel(ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY);
+ let (shutdown_tx, mut shutdown_rx) = broadcast::channel(1);
+ let inner = Arc::new(AndroidWebRtcSourceInner {
+ udid: udid.clone(),
+ encoder_handle: AtomicUsize::new(0),
+ callback_user_data: AtomicUsize::new(0),
+ shutdown_tx,
+ sender,
+ latest_keyframe: RwLock::new(None),
+ metrics,
+ });
+ let user_data = Weak::into_raw(Arc::downgrade(&inner)) as *mut c_void;
+ let mut error = std::ptr::null_mut();
+ let handle = unsafe {
+ ffi::xcw_native_h264_encoder_create(
+ Some(android_h264_encoder_frame_callback),
+ user_data,
+ &mut error,
+ )
+ };
+ if handle.is_null() {
+ unsafe {
+ let _ = Weak::from_raw(user_data as *const AndroidWebRtcSourceInner);
+ }
+ return Err(unsafe { take_native_error(error) }
+ .unwrap_or_else(|| AppError::native("Unable to create Android H.264 encoder.")));
}
+ inner
+ .encoder_handle
+ .store(handle as usize, Ordering::Release);
+ inner
+ .callback_user_data
+ .store(user_data as usize, Ordering::Release);
+
+ let source = Self { inner };
+ let task_inner = Arc::downgrade(&source.inner);
+ tokio::spawn(async move {
+ let min_frame_gap = android_webrtc_frame_interval();
+ let mut last_encoded_at = Instant::now() - min_frame_gap;
+ loop {
+ tokio::select! {
+ _ = shutdown_rx.recv() => break,
+ frame = frame_stream.next_frame() => {
+ let frame = match frame {
+ Ok(Some(frame)) => frame,
+ Ok(None) => break,
+ Err(error) => {
+ let udid = task_inner
+ .upgrade()
+ .map(|inner| inner.udid.clone())
+ .unwrap_or_else(|| "android".to_owned());
+ warn!("Android WebRTC raw frame stream failed for {udid}: {error}");
+ break;
+ }
+ };
+ let Some(inner) = task_inner.upgrade() else {
+ break;
+ };
+ let now = Instant::now();
+ if now.duration_since(last_encoded_at) < min_frame_gap {
+ continue;
+ }
+ last_encoded_at = now;
+ let handle = inner.encoder_handle.load(Ordering::Acquire);
+ let udid = inner.udid.clone();
+ let encode_result = task::spawn_blocking(move || {
+ encode_android_rgba_frame(handle, &frame)
+ })
+ .await
+ .map_err(|error| AppError::internal(format!("Failed to join Android encoder task: {error}")))
+ .and_then(|result| result);
+ if let Err(error) = encode_result {
+ warn!("Android VideoToolbox encode failed for {udid}: {error}");
+ }
+ }
+ }
+ }
+ });
+ source.request_keyframe();
+ Ok(source)
}
+ fn subscribe(&self) -> broadcast::Receiver {
+ self.inner.sender.subscribe()
+ }
+
+ async fn wait_for_keyframe(&self, timeout_duration: Duration) -> Option {
+ let deadline = Instant::now() + timeout_duration;
+ let baseline_sequence = self
+ .inner
+ .latest_keyframe
+ .read()
+ .unwrap()
+ .as_ref()
+ .map_or(0, |frame| frame.frame_sequence);
+ let mut receiver = self.inner.sender.subscribe();
+ self.request_keyframe();
+
+ loop {
+ if let Some(frame) = self.inner.latest_keyframe.read().unwrap().clone() {
+ if frame.frame_sequence > baseline_sequence {
+ return Some(frame);
+ }
+ }
+ let remaining = deadline.checked_duration_since(Instant::now())?;
+ match time::timeout(remaining, receiver.recv()).await {
+ Ok(Ok(frame)) if frame.is_keyframe && frame.frame_sequence > baseline_sequence => {
+ return Some(frame)
+ }
+ Ok(Ok(_)) | Ok(Err(broadcast::error::RecvError::Lagged(_))) => {
+ self.request_keyframe();
+ }
+ Ok(Err(_)) | Err(_) => return None,
+ }
+ }
+ }
+
+ fn request_refresh(&self) {}
+
+ fn request_keyframe(&self) {
+ self.inner
+ .metrics
+ .keyframe_requests
+ .fetch_add(1, Ordering::Relaxed);
+ unsafe {
+ ffi::xcw_native_h264_encoder_request_keyframe(
+ self.inner.encoder_handle.load(Ordering::Acquire) as *mut c_void,
+ );
+ }
+ }
+}
+
+impl Drop for AndroidWebRtcSourceInner {
+ fn drop(&mut self) {
+ let _ = self.shutdown_tx.send(());
+ let encoder_handle = self.encoder_handle.load(Ordering::Acquire);
+ let callback_user_data = self.callback_user_data.load(Ordering::Acquire);
+ unsafe {
+ if encoder_handle != 0 {
+ ffi::xcw_native_h264_encoder_destroy(encoder_handle as *mut c_void);
+ }
+ if callback_user_data != 0 {
+ let _ = Weak::from_raw(callback_user_data as *const AndroidWebRtcSourceInner);
+ }
+ }
+ }
+}
+
+unsafe extern "C" fn android_h264_encoder_frame_callback(
+ frame: *const ffi::xcw_native_frame,
+ user_data: *mut c_void,
+) {
+ if frame.is_null() || user_data.is_null() {
+ return;
+ }
+
+ let weak = unsafe { Weak::from_raw(user_data as *const AndroidWebRtcSourceInner) };
+ if let Some(inner) = weak.upgrade() {
+ unsafe {
+ inner.handle_encoded_frame(&*frame);
+ }
+ }
+ let _ = Weak::into_raw(weak);
+}
+
+impl AndroidWebRtcSourceInner {
+ fn handle_encoded_frame(&self, frame: &ffi::xcw_native_frame) {
+ let description = unsafe { copy_native_shared_bytes(frame.description) };
+ let Some(data) = (unsafe { copy_native_shared_bytes(frame.data) }) else {
+ return;
+ };
+ let packet = Arc::new(FramePacket {
+ frame_sequence: frame.frame_sequence,
+ timestamp_us: frame.timestamp_us,
+ is_keyframe: frame.is_keyframe,
+ width: frame.width,
+ height: frame.height,
+ codec: native_c_string(frame.codec),
+ description,
+ data,
+ });
+ self.metrics.frames_encoded.fetch_add(1, Ordering::Relaxed);
+ if packet.is_keyframe {
+ self.metrics
+ .keyframes_encoded
+ .fetch_add(1, Ordering::Relaxed);
+ *self.latest_keyframe.write().unwrap() = Some(packet.clone());
+ }
+ let _ = self.sender.send(packet);
+ }
+}
+
+fn encode_android_rgba_frame(
+ encoder_handle: usize,
+ frame: &android::AndroidFrame,
+) -> Result<(), AppError> {
+ unsafe {
+ let mut error = std::ptr::null_mut();
+ let ok = ffi::xcw_native_h264_encoder_encode_rgba(
+ encoder_handle as *mut c_void,
+ frame.rgba.as_ptr(),
+ frame.rgba.len(),
+ frame.width,
+ frame.height,
+ frame.timestamp_us,
+ &mut error,
+ );
+ if ok {
+ Ok(())
+ } else {
+ Err(take_native_error(error)
+ .unwrap_or_else(|| AppError::native("Android VideoToolbox encode failed.")))
+ }
+ }
+}
+
+unsafe fn copy_native_shared_bytes(bytes: ffi::xcw_native_shared_bytes) -> Option {
+ if bytes.data.is_null() || bytes.length == 0 {
+ if !bytes.owner.is_null() {
+ unsafe {
+ ffi::xcw_native_release_shared_bytes(bytes);
+ }
+ }
+ return None;
+ }
+
+ let copied =
+ unsafe { Bytes::copy_from_slice(std::slice::from_raw_parts(bytes.data, bytes.length)) };
+ unsafe {
+ ffi::xcw_native_release_shared_bytes(bytes);
+ }
+ Some(copied)
+}
+
+fn native_c_string(ptr: *const i8) -> Option {
+ if ptr.is_null() {
+ return None;
+ }
+ let value = unsafe { CStr::from_ptr(ptr) }
+ .to_string_lossy()
+ .trim()
+ .to_owned();
+ if value.is_empty() {
+ None
+ } else {
+ Some(value)
+ }
+}
+
+unsafe fn take_native_error(raw: *mut i8) -> Option {
+ if raw.is_null() {
+ return None;
+ }
+ let message = unsafe { CStr::from_ptr(raw) }
+ .to_string_lossy()
+ .into_owned();
+ unsafe {
+ ffi::xcw_native_free_string(raw);
+ }
+ Some(AppError::native(message))
+}
+
+fn android_webrtc_max_edge() -> u32 {
+ std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
+ .ok()
+ .and_then(|value| value.parse::().ok())
+ .unwrap_or(DEFAULT_ANDROID_WEBRTC_MAX_EDGE)
+ .clamp(360, 2400)
+}
+
+fn android_webrtc_frame_interval() -> Duration {
+ let fps = std::env::var("SIMDECK_REALTIME_FPS")
+ .or_else(|_| std::env::var("SIMDECK_LOCAL_STREAM_FPS"))
+ .or_else(|_| std::env::var("SIMDECK_ANDROID_WEBRTC_FPS"))
+ .ok()
+ .and_then(|value| value.parse::().ok())
+ .unwrap_or(DEFAULT_ANDROID_WEBRTC_FPS)
+ .clamp(15, MAX_ANDROID_WEBRTC_FPS);
+ Duration::from_micros(1_000_000 / fps)
+}
+
+#[derive(Clone)]
+enum WebRtcVideoSource {
+ Simulator(crate::simulators::session::SimulatorSession),
+ Android(AndroidWebRtcSource),
+}
+
+impl WebRtcVideoSource {
+ fn subscribe(&self) -> WebRtcFrameReceiver {
+ match self {
+ Self::Simulator(session) => WebRtcFrameReceiver::Simulator(session.subscribe()),
+ Self::Android(source) => WebRtcFrameReceiver::Android(source.subscribe()),
+ }
+ }
+
+ async fn wait_for_keyframe(&self, timeout_duration: Duration) -> Option {
+ match self {
+ Self::Simulator(session) => session.wait_for_keyframe(timeout_duration).await,
+ Self::Android(source) => source.wait_for_keyframe(timeout_duration).await,
+ }
+ }
+
+ fn request_refresh(&self) {
+ match self {
+ Self::Simulator(session) => session.request_refresh(),
+ Self::Android(source) => source.request_refresh(),
+ }
+ }
+
+ fn request_keyframe(&self) {
+ match self {
+ Self::Simulator(session) => session.request_keyframe(),
+ Self::Android(source) => source.request_keyframe(),
+ }
+ }
+}
+
+enum WebRtcFrameReceiver {
+ Simulator(crate::simulators::session::FrameSubscription),
+ Android(broadcast::Receiver),
+}
+
+impl WebRtcFrameReceiver {
+ async fn recv(&mut self) -> Result {
+ match self {
+ Self::Simulator(receiver) => receiver.recv().await,
+ Self::Android(receiver) => receiver.recv().await,
+ }
+ }
+}
+
+async fn wait_for_h264_sync_keyframe(
+ source: &WebRtcVideoSource,
+ timeout_duration: Duration,
+) -> Option {
let deadline = time::Instant::now() + timeout_duration;
loop {
let remaining = deadline.checked_duration_since(time::Instant::now())?;
- let frame = session.wait_for_keyframe(remaining).await?;
+ let frame = source.wait_for_keyframe(remaining).await?;
if h264_frame_is_decoder_sync(&frame) {
return Some(frame);
}
- session.request_keyframe();
+ source.request_keyframe();
}
}
struct WebRtcMediaStream {
state: AppState,
- session: crate::simulators::session::SimulatorSession,
+ source: WebRtcVideoSource,
udid: String,
- first_frame: crate::transport::packet::SharedFrame,
+ first_frame: SharedFrame,
peer_connection: Arc,
video_track: Arc,
cancellation_token: broadcast::Sender<()>,
@@ -879,7 +1457,7 @@ impl WebRtcMediaStream {
async fn run(self) {
let Self {
state,
- session,
+ source,
udid,
first_frame,
peer_connection,
@@ -888,7 +1466,7 @@ impl WebRtcMediaStream {
mut cancellation,
mut stream_control_rx,
} = self;
- let mut rx = session.subscribe();
+ let mut rx = source.subscribe();
let mut send_timing = WebRtcSendTiming::new();
let mut peer_state_interval = time::interval(Duration::from_millis(250));
let realtime_stream = realtime_stream_enabled();
@@ -925,10 +1503,10 @@ impl WebRtcMediaStream {
if recovery_action_for_write_timeout(realtime_stream)
== FrameRecoveryAction::Refresh
{
- session.request_refresh();
+ source.request_refresh();
} else {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
}
}
Err(error) => {
@@ -967,9 +1545,9 @@ impl WebRtcMediaStream {
};
if command.force_keyframe || command.snapshot {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
} else {
- session.request_refresh();
+ source.request_refresh();
}
}
frame = rx.recv() => {
@@ -981,7 +1559,7 @@ impl WebRtcMediaStream {
.frames_dropped_server
.fetch_add(skipped, Ordering::Relaxed);
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
continue;
}
Err(broadcast::error::RecvError::Closed) => {
@@ -997,7 +1575,7 @@ impl WebRtcMediaStream {
waiting_for_keyframe = false;
} else if frame.is_keyframe {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
state.metrics.frames_dropped_server.fetch_add(1, Ordering::Relaxed);
continue;
}
@@ -1022,9 +1600,9 @@ impl WebRtcMediaStream {
let recovery_action = recovery_action_for_write_timeout(realtime_stream);
waiting_for_keyframe = recovery_action == FrameRecoveryAction::Keyframe;
if recovery_action == FrameRecoveryAction::Refresh {
- session.request_refresh();
+ source.request_refresh();
} else {
- session.request_keyframe();
+ source.request_keyframe();
}
}
Err(error) => {
diff --git a/skills/simdeck/SKILL.md b/skills/simdeck/SKILL.md
index c37ea448..aced60a0 100644
--- a/skills/simdeck/SKILL.md
+++ b/skills/simdeck/SKILL.md
@@ -5,7 +5,7 @@ description: Use for simulator lifecycle, app install/launch, live viewing, UI i
# SimDeck Agent Guide
-SimDeck automates iOS Simulators. Use the CLI for automation and the browser UI for live human visibility. Works with UIKit, SwiftUI, React Native, Expo, and NativeScript apps.
+SimDeck automates iOS Simulators and Android emulators. Use the CLI for automation and the browser UI for live human visibility. iOS works with UIKit, SwiftUI, React Native, Expo, and NativeScript apps; Android works through ADB, emulator lifecycle, screenshots, logs, and UIAutomator hierarchy dumps.
SimDeck uses one warm daemon per project. Check it with `simdeck daemon status`; start it or open the browser UI when needed:
@@ -47,6 +47,7 @@ simdeck shutdown
simdeck erase
simdeck core-simulator restart
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck launch com.example.App
simdeck uninstall com.example.App
simdeck open-url myapp://route
@@ -56,6 +57,12 @@ simdeck toggle-appearance
Build apps with project tooling.
+Android devices use IDs like `android:Pixel_8_API_36`. `simdeck list` discovers
+AVDs from the Android SDK, `boot` starts `emulator -avd ... -no-window`, and
+live browser viewing uses the same WebRTC H.264 endpoint as iOS. Android frames
+come from emulator gRPC and are encoded through VideoToolbox. `simdeck stream`
+is still iOS-only.
+
## Fast Agent Inspection
Use targeted checks for test loops. `describe` is a diagnostic snapshot of the whole hierarchy; it is useful for planning, but it is expensive. For verification, prefer the daemon APIs exposed by `simdeck/test`: `query`, `waitFor`, `assert`, selector `tap`, and `batch`.
@@ -75,6 +82,8 @@ simdeck describe --direct
```
Use `--source auto` with the project daemon. Use `--direct` or `--source native-ax` for the private CoreSimulator accessibility bridge. NativeScript, React Native, and Flutter inspector runtimes can add richer hierarchy data.
+For Android IDs, `describe` uses `uiautomator dump`; use `--format agent` or
+`--format compact-json` the same way as iOS.
Prefer selectors, coordinates only when needed. Selector taps go through the daemon and wait for the element server-side.
From 3f38fd7d486d4b177d0a16f64a910f8906cf31b1 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 13:57:06 -0400
Subject: [PATCH 02/29] Fix CI lint failures
---
client/src/app/AppShell.tsx | 5 +++--
client/src/styles/components.css | 9 ++++++---
docs/extensions/browser-client.md | 16 ++++++++--------
docs/guide/architecture.md | 22 +++++++++++-----------
server/src/android.rs | 11 +++++++----
5 files changed, 35 insertions(+), 28 deletions(-)
diff --git a/client/src/app/AppShell.tsx b/client/src/app/AppShell.tsx
index f634dc56..78f444dd 100644
--- a/client/src/app/AppShell.tsx
+++ b/client/src/app/AppShell.tsx
@@ -746,11 +746,12 @@ export function AppShell({
[selectedSimulator?.udid, streamStamp],
);
const chromeUsesAsset = Boolean(
- viewportChromeProfile && viewportChromeProfile.chromeStyle !== "css-android",
+ viewportChromeProfile &&
+ viewportChromeProfile.chromeStyle !== "css-android",
);
const chromeRequired = Boolean(
(shouldRenderChrome && !chromeProfileReady) ||
- (chromeUsesAsset && chromeUrl),
+ (chromeUsesAsset && chromeUrl),
);
const simulatorRotationQuarterTurns =
normalizeSimulatorRotationQuarterTurns(selectedSimulator);
diff --git a/client/src/styles/components.css b/client/src/styles/components.css
index 8a4183c4..b4686f1d 100644
--- a/client/src/styles/components.css
+++ b/client/src/styles/components.css
@@ -1508,8 +1508,7 @@
border-radius: 64px;
background:
linear-gradient(145deg, rgba(255, 255, 255, 0.12), transparent 22%),
- linear-gradient(315deg, rgba(0, 0, 0, 0.42), transparent 34%),
- #17191f;
+ linear-gradient(315deg, rgba(0, 0, 0, 0.42), transparent 34%), #17191f;
box-shadow:
inset 0 0 0 2px rgba(255, 255, 255, 0.08),
inset 0 0 0 9px rgba(0, 0, 0, 0.34),
@@ -1539,7 +1538,11 @@
height: 17px;
border-radius: 50%;
background:
- radial-gradient(circle at 58% 42%, rgba(105, 150, 180, 0.9), transparent 24%),
+ radial-gradient(
+ circle at 58% 42%,
+ rgba(105, 150, 180, 0.9),
+ transparent 24%
+ ),
#08090c;
box-shadow:
inset 0 0 0 2px rgba(255, 255, 255, 0.06),
diff --git a/docs/extensions/browser-client.md b/docs/extensions/browser-client.md
index 938d8184..336356cd 100644
--- a/docs/extensions/browser-client.md
+++ b/docs/extensions/browser-client.md
@@ -39,15 +39,15 @@ client/
└── styles/
```
-| Folder | Responsibility |
-| ------------------------- | ----------------------------------------------------------------------- |
-| `api/` | Typed wrappers around the SimDeck REST API and shared TypeScript types. |
-| `features/simulators/` | Sidebar list of simulators plus boot/shutdown affordances. |
-| `features/viewport/` | Frame canvas, chrome compositing, hit testing. |
+| Folder | Responsibility |
+| ------------------------- | ---------------------------------------------------------------------------- |
+| `api/` | Typed wrappers around the SimDeck REST API and shared TypeScript types. |
+| `features/simulators/` | Sidebar list of simulators plus boot/shutdown affordances. |
+| `features/viewport/` | Frame canvas, chrome compositing, hit testing. |
| `features/stream/` | WebRTC H.264 client for iOS and Android, receiver stats, and frame plumbing. |
-| `features/input/` | Touch / keyboard / hardware-button affordances. |
-| `features/accessibility/` | Accessibility tree pane and source switcher. |
-| `features/toolbar/` | Top toolbar (rotate, home, app switcher, dark mode toggle, refresh). |
+| `features/input/` | Touch / keyboard / hardware-button affordances. |
+| `features/accessibility/` | Accessibility tree pane and source switcher. |
+| `features/toolbar/` | Top toolbar (rotate, home, app switcher, dark mode toggle, refresh). |
## Bootstrap flow
diff --git a/docs/guide/architecture.md b/docs/guide/architecture.md
index 641d4514..d7dd6fb7 100644
--- a/docs/guide/architecture.md
+++ b/docs/guide/architecture.md
@@ -20,18 +20,18 @@ Owns the public CLI shape (`simdeck`, `simdeck ui`, `daemon`, `boot`, `shutdown`
Key modules:
-| Module | Responsibility |
-| ----------------------------------- | ---------------------------------------------------------------------------------------------------------- |
-| `server/src/main.rs` | CLI entrypoint, project daemon management, AppKit main-thread shim, tokio runtime bootstrap. |
-| `server/src/api/routes.rs` | Every `/api/*` route, including simulator control, accessibility, and inspector proxy. |
+| Module | Responsibility |
+| ----------------------------------- | ----------------------------------------------------------------------------------------------------------- |
+| `server/src/main.rs` | CLI entrypoint, project daemon management, AppKit main-thread shim, tokio runtime bootstrap. |
+| `server/src/api/routes.rs` | Every `/api/*` route, including simulator control, accessibility, and inspector proxy. |
| `server/src/android.rs` | Android AVD discovery, emulator lifecycle, emulator gRPC input/video, screenshots, UIAutomator, and logcat. |
-| `server/src/transport/webrtc.rs` | WebRTC offer/answer endpoint for H.264 browser video. |
-| `server/src/transport/packet.rs` | Shared encoded frame type used between simulator sessions and transports. |
-| `server/src/inspector.rs` | WebSocket hub for the NativeScript runtime inspector. |
-| `server/src/simulators/registry.rs` | Per-UDID session registry with lazy attachment to the native bridge. |
-| `server/src/simulators/session.rs` | Frame broadcast channel, keyframe gating, refresh requests. |
-| `server/src/metrics/counters.rs` | Atomic counters and per-client stream stats accepted from stream transports or `/api/client-stream-stats`. |
-| `server/src/logs.rs` | `os_log` log streaming and filtering. |
+| `server/src/transport/webrtc.rs` | WebRTC offer/answer endpoint for H.264 browser video. |
+| `server/src/transport/packet.rs` | Shared encoded frame type used between simulator sessions and transports. |
+| `server/src/inspector.rs` | WebSocket hub for the NativeScript runtime inspector. |
+| `server/src/simulators/registry.rs` | Per-UDID session registry with lazy attachment to the native bridge. |
+| `server/src/simulators/session.rs` | Frame broadcast channel, keyframe gating, refresh requests. |
+| `server/src/metrics/counters.rs` | Atomic counters and per-client stream stats accepted from stream transports or `/api/client-stream-stats`. |
+| `server/src/logs.rs` | `os_log` log streaming and filtering. |
The Rust server runs the tokio runtime on a worker thread while the AppKit main loop spins on the main thread. The native bridge needs the main loop to deliver display callbacks and HID events.
diff --git a/server/src/android.rs b/server/src/android.rs
index 05cf00f1..d3a5daaa 100644
--- a/server/src/android.rs
+++ b/server/src/android.rs
@@ -22,6 +22,9 @@ const RUNNING_EMULATOR_CACHE_TTL: Duration = Duration::from_secs(2);
const AVD_GRPC_PORT_CACHE_TTL: Duration = Duration::from_secs(60);
const SCREEN_SIZE_CACHE_TTL: Duration = Duration::from_secs(60);
+type TimedMap = Option<(Instant, HashMap)>;
+type ScreenSizeCache = HashMap;
+
#[derive(Clone, Default)]
pub struct AndroidBridge;
@@ -244,7 +247,7 @@ impl AndroidBridge {
pub fn pasteboard_text(&self, id: &str) -> Result {
let serial = self.serial_for_id(id)?;
- Ok(self.run_adb_shell(&serial, "cmd clipboard get")?)
+ self.run_adb_shell(&serial, "cmd clipboard get")
}
pub fn send_touch(&self, id: &str, x: f64, y: f64, phase: &str) -> Result<(), AppError> {
@@ -732,7 +735,7 @@ impl AndroidBridge {
}
fn running_emulators(&self) -> Result, AppError> {
- static CACHE: OnceLock)>>> = OnceLock::new();
+ static CACHE: OnceLock>> = OnceLock::new();
let cache = CACHE.get_or_init(|| Mutex::new(None));
if let Some((updated_at, running)) = cache.lock().unwrap().as_ref() {
if updated_at.elapsed() < RUNNING_EMULATOR_CACHE_TTL {
@@ -766,7 +769,7 @@ impl AndroidBridge {
}
fn grpc_port_for_avd(&self, avd_name: &str) -> Result {
- static CACHE: OnceLock)>>> = OnceLock::new();
+ static CACHE: OnceLock>> = OnceLock::new();
let cache = CACHE.get_or_init(|| Mutex::new(None));
if let Some((updated_at, ports)) = cache.lock().unwrap().as_ref() {
if updated_at.elapsed() < AVD_GRPC_PORT_CACHE_TTL {
@@ -793,7 +796,7 @@ impl AndroidBridge {
}
fn screen_size_for_serial(&self, serial: &str) -> Result<(f64, f64), AppError> {
- static CACHE: OnceLock>> = OnceLock::new();
+ static CACHE: OnceLock> = OnceLock::new();
let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new()));
if let Some((updated_at, size)) = cache.lock().unwrap().get(serial) {
if updated_at.elapsed() < SCREEN_SIZE_CACHE_TTL {
From 459a80e6ca67112a2470683ed6daa990d0f4651c Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 14:11:18 -0400
Subject: [PATCH 03/29] Harden CLI integration fixture launch
---
scripts/integration/cli.mjs | 22 ++++++++++++++++++++++
1 file changed, 22 insertions(+)
diff --git a/scripts/integration/cli.mjs b/scripts/integration/cli.mjs
index 461cf2b7..c9551b7f 100644
--- a/scripts/integration/cli.mjs
+++ b/scripts/integration/cli.mjs
@@ -837,6 +837,28 @@ async function ensureFixtureForeground(label, options = {}) {
if (launchError === null) {
throw verifyError;
}
+ logStep(`${label}: opening fixture URL after launch timeout`);
+ }
+
+ try {
+ await retrySimdeckJson(
+ cliArgs(["open-url", simulatorUDID, fixtureUrl]),
+ `${label} fixture URL fallback`,
+ {
+ attempts: 2,
+ delayMs: 2_000,
+ timeoutMs: 180_000,
+ },
+ );
+ return await verifyUi(label, {
+ expectFixture: true,
+ attempts: options.fallbackVerifyAttempts ?? 12,
+ delayMs: options.fallbackVerifyDelayMs ?? 1_500,
+ });
+ } catch (urlError) {
+ logStep(
+ `${label}: fixture URL fallback failed: ${summarizeError(urlError)}`,
+ );
logStep(`${label}: tapping fixture icon after launch timeout`);
}
From ba44885e9f9e217c1baca4dd9269eff92bcc0c9a Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 14:37:39 -0400
Subject: [PATCH 04/29] Render Android emulators screen only
---
client/src/api/types.ts | 2 +-
client/src/app/AppShell.tsx | 9 +---
.../simulators/simulatorDisplay.test.ts | 12 +++++
.../features/simulators/simulatorDisplay.ts | 3 --
client/src/features/viewport/DeviceChrome.tsx | 51 ++++++++-----------
.../features/viewport/SimulatorViewport.tsx | 3 --
client/src/styles/components.css | 51 -------------------
server/src/android.rs | 13 ++---
8 files changed, 41 insertions(+), 103 deletions(-)
diff --git a/client/src/api/types.ts b/client/src/api/types.ts
index 79d6b5dc..ebd7b4ed 100644
--- a/client/src/api/types.ts
+++ b/client/src/api/types.ts
@@ -118,7 +118,7 @@ export interface ChromeProfile {
screenWidth: number;
screenHeight: number;
cornerRadius: number;
- chromeStyle?: "asset" | "css-android" | string;
+ chromeStyle?: "asset" | string;
hasScreenMask?: boolean;
buttons?: ChromeButtonProfile[];
}
diff --git a/client/src/app/AppShell.tsx b/client/src/app/AppShell.tsx
index 78f444dd..55b51e2c 100644
--- a/client/src/app/AppShell.tsx
+++ b/client/src/app/AppShell.tsx
@@ -745,13 +745,9 @@ export function AppShell({
: "",
[selectedSimulator?.udid, streamStamp],
);
- const chromeUsesAsset = Boolean(
- viewportChromeProfile &&
- viewportChromeProfile.chromeStyle !== "css-android",
- );
+ const chromeUsesAsset = Boolean(viewportChromeProfile && chromeUrl);
const chromeRequired = Boolean(
- (shouldRenderChrome && !chromeProfileReady) ||
- (chromeUsesAsset && chromeUrl),
+ (shouldRenderChrome && !chromeProfileReady) || chromeUsesAsset,
);
const simulatorRotationQuarterTurns =
normalizeSimulatorRotationQuarterTurns(selectedSimulator);
@@ -1991,7 +1987,6 @@ export function AppShell({
chromeProfile={viewportChromeProfile}
chromeRequired={chromeRequired}
chromeScreenStyle={viewportScreenStyle}
- chromeStyle={viewportChromeProfile?.chromeStyle}
chromeUrl={chromeUrl}
chromeButtonUrl={chromeButtonUrl}
debugPanel={
diff --git a/client/src/features/simulators/simulatorDisplay.test.ts b/client/src/features/simulators/simulatorDisplay.test.ts
index 58c0b2f9..f39a5507 100644
--- a/client/src/features/simulators/simulatorDisplay.test.ts
+++ b/client/src/features/simulators/simulatorDisplay.test.ts
@@ -51,4 +51,16 @@ describe("simulatorDisplay", () => {
),
).toBe(false);
});
+
+ it("keeps native chrome off for Android emulators", () => {
+ expect(
+ shouldRenderNativeChrome(
+ simulator({
+ deviceTypeIdentifier: "android-emulator",
+ name: "SimDeck Pixel",
+ platform: "android-emulator",
+ }),
+ ),
+ ).toBe(false);
+ });
});
diff --git a/client/src/features/simulators/simulatorDisplay.ts b/client/src/features/simulators/simulatorDisplay.ts
index dc2cd995..0837ef20 100644
--- a/client/src/features/simulators/simulatorDisplay.ts
+++ b/client/src/features/simulators/simulatorDisplay.ts
@@ -14,9 +14,6 @@ export function simulatorRuntimeLabel(simulator: SimulatorMetadata): string {
export function shouldRenderNativeChrome(
simulator: SimulatorMetadata,
): boolean {
- if (simulator.platform === "android-emulator") {
- return true;
- }
const identifier = simulator.deviceTypeIdentifier ?? "";
const name = simulator.name ?? "";
const deviceTypeName = simulator.deviceTypeName ?? "";
diff --git a/client/src/features/viewport/DeviceChrome.tsx b/client/src/features/viewport/DeviceChrome.tsx
index 2ce43f4c..dfa0bb8c 100644
--- a/client/src/features/viewport/DeviceChrome.tsx
+++ b/client/src/features/viewport/DeviceChrome.tsx
@@ -17,7 +17,6 @@ interface DeviceChromeProps {
accessibilitySelectedId: string;
chromeProfile: ChromeProfile | null;
chromeScreenStyle: CSSProperties | null;
- chromeStyle?: string;
chromeUrl: string;
chromeButtonUrl: (button: string, pressed?: boolean) => string;
hasFrame: boolean;
@@ -62,7 +61,6 @@ export function DeviceChrome({
accessibilitySelectedId,
chromeProfile,
chromeScreenStyle,
- chromeStyle,
chromeUrl,
chromeButtonUrl,
hasFrame,
@@ -95,42 +93,35 @@ export function DeviceChrome({
useChromeProfile,
}: DeviceChromeProps) {
if (useChromeProfile) {
- const useCssAndroidChrome = chromeStyle === "css-android";
return (
- {useCssAndroidChrome ? (
-
- ) : (
- <>
-
-
-
- >
- )}
+
+
+
Result {
let serial = self.serial_for_id(id)?;
let (width, height) = self.screen_size_for_serial(&serial)?;
- let horizontal_bezel = (width * 0.055).clamp(48.0, 80.0);
- let vertical_bezel = (height * 0.04).clamp(64.0, 104.0);
Ok(json!({
- "totalWidth": width + horizontal_bezel * 2.0,
- "totalHeight": height + vertical_bezel * 2.0,
- "screenX": horizontal_bezel,
- "screenY": vertical_bezel,
+ "totalWidth": width,
+ "totalHeight": height,
+ "screenX": 0,
+ "screenY": 0,
"screenWidth": width,
"screenHeight": height,
- "cornerRadius": (width * 0.055).clamp(32.0, 56.0),
+ "cornerRadius": 0,
"hasScreenMask": false,
- "chromeStyle": "css-android",
}))
}
From 910ccdfef1d0c18f53f7400942b4c9a944953df8 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 15:07:09 -0400
Subject: [PATCH 05/29] Improve Android inspector and stream pacing
---
client/src/api/types.ts | 13 +++
client/src/app/AppShell.tsx | 28 ++++--
client/src/app/uiState.ts | 2 +
.../accessibility/AccessibilityInspector.tsx | 36 +++++++-
client/src/features/viewport/DeviceChrome.tsx | 14 ++-
.../features/viewport/SimulatorViewport.tsx | 3 +
client/src/styles/components.css | 11 +++
server/src/android.rs | 92 +++++++++++++++++--
server/src/transport/webrtc.rs | 57 ++++++++----
9 files changed, 221 insertions(+), 35 deletions(-)
diff --git a/client/src/api/types.ts b/client/src/api/types.ts
index ebd7b4ed..8b12120f 100644
--- a/client/src/api/types.ts
+++ b/client/src/api/types.ts
@@ -165,15 +165,23 @@ export interface AccessibilityNode {
AXUniqueId?: string | null;
AXValue?: string | null;
alpha?: number | null;
+ androidClass?: string | null;
+ androidPackage?: string | null;
+ androidResourceId?: string | null;
backgroundColor?: Record | null;
bounds?: AccessibilityFrame | null;
+ checkable?: boolean | null;
+ checked?: boolean | null;
className?: string | null;
+ clickable?: boolean | null;
children?: AccessibilityNode[];
control?: Record | null;
content_required?: boolean | null;
custom_actions?: string[] | null;
debugDescription?: string | null;
enabled?: boolean | null;
+ focusable?: boolean | null;
+ focused?: boolean | null;
frame?: AccessibilityFrame | null;
frameInScreen?: AccessibilityFrame | null;
flutter?: Record | null;
@@ -183,8 +191,10 @@ export interface AccessibilityNode {
isHidden?: boolean | null;
isOpaque?: boolean | null;
isUserInteractionEnabled?: boolean | null;
+ longClickable?: boolean | null;
moduleName?: string | null;
nativeScript?: Record | null;
+ password?: boolean | null;
pid?: number | null;
placeholder?: string | null;
reactNative?: Record | null;
@@ -192,6 +202,8 @@ export interface AccessibilityNode {
role_description?: string | null;
scroll?: Record | null;
semantics?: Record | null;
+ scrollable?: boolean | null;
+ selected?: boolean | null;
source?:
| "native-ax"
| "in-app-inspector"
@@ -218,6 +230,7 @@ export interface AccessibilityNode {
}
export type AccessibilitySource =
+ | "android-uiautomator"
| "native-ax"
| "in-app-inspector"
| "nativescript"
diff --git a/client/src/app/AppShell.tsx b/client/src/app/AppShell.tsx
index 55b51e2c..2bf85df6 100644
--- a/client/src/app/AppShell.tsx
+++ b/client/src/app/AppShell.tsx
@@ -1280,6 +1280,7 @@ export function AppShell({
viewportChromeProfile,
chromeScreenRect,
);
+ const isAndroidViewport = isAndroidSimulator(selectedSimulator);
const chromeScreenStyle =
viewportChromeProfile && chromeScreenRect
? ({
@@ -1310,15 +1311,19 @@ export function AppShell({
} satisfies CSSProperties)
: null;
const screenOnlyStyle =
- !viewportChromeProfile && chromeProfile && chromeProfile.screenWidth > 0
+ !viewportChromeProfile && isAndroidViewport
? ({
- borderRadius: `${Math.min(
- chromeProfile.cornerRadius *
- (DEVICE_SCREEN_WIDTH / chromeProfile.screenWidth),
- DEVICE_SCREEN_WIDTH / 2,
- )}px`,
+ borderRadius: "10px",
} satisfies CSSProperties)
- : null;
+ : !viewportChromeProfile && chromeProfile && chromeProfile.screenWidth > 0
+ ? ({
+ borderRadius: `${Math.min(
+ chromeProfile.cornerRadius *
+ (DEVICE_SCREEN_WIDTH / chromeProfile.screenWidth),
+ DEVICE_SCREEN_WIDTH / 2,
+ )}px`,
+ } satisfies CSSProperties)
+ : null;
const viewportScreenStyle = chromeScreenStyle ?? screenOnlyStyle;
const shellStyle = viewportChromeProfile
? {
@@ -2046,6 +2051,7 @@ export function AppShell({
outerCanvasRef={handleOuterCanvasRef}
rotationQuarterTurns={rotationQuarterTurns}
screenAspect={screenAspect}
+ screenClassName={isAndroidViewport ? "android-screen" : undefined}
selectedSimulator={selectedSimulator}
shellStyle={shellStyle}
streamCanvasRef={handleStreamCanvasRef}
@@ -2217,6 +2223,14 @@ function normalizeMaxEdge(
: fallback;
}
+function isAndroidSimulator(simulator: SimulatorMetadata | null): boolean {
+ return Boolean(
+ simulator?.platform === "android-emulator" ||
+ simulator?.deviceTypeIdentifier === "android-emulator" ||
+ simulator?.udid.startsWith("android:"),
+ );
+}
+
function streamConfigsEqual(left: StreamConfig, right: StreamConfig): boolean {
return (
left.encoder === right.encoder &&
diff --git a/client/src/app/uiState.ts b/client/src/app/uiState.ts
index 98871d0c..0a00e027 100644
--- a/client/src/app/uiState.ts
+++ b/client/src/app/uiState.ts
@@ -37,6 +37,7 @@ const ACCESSIBILITY_SOURCE_ORDER: AccessibilitySource[] = [
"flutter",
"swiftui",
"in-app-inspector",
+ "android-uiautomator",
"native-ax",
];
@@ -162,6 +163,7 @@ export function isAccessibilitySource(
value === "flutter" ||
value === "swiftui" ||
value === "in-app-inspector" ||
+ value === "android-uiautomator" ||
value === "native-ax"
);
}
diff --git a/client/src/features/accessibility/AccessibilityInspector.tsx b/client/src/features/accessibility/AccessibilityInspector.tsx
index bf2c8d44..42a866c3 100644
--- a/client/src/features/accessibility/AccessibilityInspector.tsx
+++ b/client/src/features/accessibility/AccessibilityInspector.tsx
@@ -464,17 +464,24 @@ function NodeDetails({
node: AccessibilityNode;
selectedSimulator: SimulatorMetadata | null;
}) {
+ const isAndroid = isAndroidSimulator(selectedSimulator);
const details = [
["Type", accessibilityKind(node)],
["Label", primaryAccessibilityText(node)],
["Source", sourceLocationText(node)],
- ["Identifier", accessibilityIdentifier(node)],
+ [
+ isAndroid ? "Resource ID" : "Identifier",
+ isAndroid
+ ? (node.androidResourceId ?? "")
+ : accessibilityIdentifier(node),
+ ],
["Inspector ID", node.inspectorId ?? ""],
["Module", node.moduleName ?? ""],
["NativeScript", nativeScriptDescription(node.nativeScript)],
["React Native", reactNativeDescription(node.reactNative)],
["Flutter", flutterDescription(node.flutter)],
- ["UIKit Class", node.className ?? ""],
+ [isAndroid ? "Android Class" : "UIKit Class", node.className ?? ""],
+ ["Package", isAndroid ? (node.androidPackage ?? "") : ""],
["Last JS", lastUIKitScriptText(node)],
["Value", node.AXValue ?? ""],
["Role", node.role ?? ""],
@@ -483,6 +490,15 @@ function NodeDetails({
["SwiftUI", swiftUIDescription(node.swiftUI)],
["Enabled", node.enabled == null ? "" : node.enabled ? "true" : "false"],
["Hidden", node.isHidden == null ? "" : node.isHidden ? "true" : "false"],
+ ["Clickable", boolDetail(isAndroid, node.clickable)],
+ ["Long Clickable", boolDetail(isAndroid, node.longClickable)],
+ ["Focusable", boolDetail(isAndroid, node.focusable)],
+ ["Focused", boolDetail(isAndroid, node.focused)],
+ ["Scrollable", boolDetail(isAndroid, node.scrollable)],
+ ["Checkable", boolDetail(isAndroid, node.checkable)],
+ ["Checked", boolDetail(isAndroid, node.checked)],
+ ["Selected", boolDetail(isAndroid, node.selected)],
+ ["Password", boolDetail(isAndroid, node.password)],
["Alpha", node.alpha == null ? "" : String(round(node.alpha))],
["Frame", validFrame(node.frame) ? frameText(node.frame) : ""],
["PID", node.pid == null ? "" : String(node.pid)],
@@ -504,6 +520,18 @@ function NodeDetails({
);
}
+function isAndroidSimulator(simulator: SimulatorMetadata | null): boolean {
+ return Boolean(
+ simulator?.platform === "android-emulator" ||
+ simulator?.deviceTypeIdentifier === "android-emulator" ||
+ simulator?.udid.startsWith("android:"),
+ );
+}
+
+function boolDetail(include: boolean, value: boolean | null | undefined) {
+ return include && value != null ? (value ? "true" : "false") : "";
+}
+
function UIKitScriptEditor({
node,
selectedSimulator,
@@ -737,6 +765,7 @@ const HIERARCHY_SOURCE_ORDER: AccessibilitySource[] = [
"flutter",
"swiftui",
"in-app-inspector",
+ "android-uiautomator",
"native-ax",
];
@@ -770,6 +799,9 @@ function sourceLabel(source: AccessibilitySource): string {
if (source === "swiftui") {
return "SwiftUI";
}
+ if (source === "android-uiautomator") {
+ return "Android";
+ }
return source === "in-app-inspector" ? "UIKit" : "Native AX";
}
diff --git a/client/src/features/viewport/DeviceChrome.tsx b/client/src/features/viewport/DeviceChrome.tsx
index dfa0bb8c..41f96325 100644
--- a/client/src/features/viewport/DeviceChrome.tsx
+++ b/client/src/features/viewport/DeviceChrome.tsx
@@ -43,6 +43,7 @@ interface DeviceChromeProps {
onStartPanning: (event: React.PointerEvent) => void;
rotationQuarterTurns: number;
screenAspect: string;
+ screenClassName?: string;
shellStyle: CSSProperties | null;
simulatorName: string;
streamBackend: string;
@@ -82,6 +83,7 @@ export function DeviceChrome({
onStartPanning,
rotationQuarterTurns,
screenAspect,
+ screenClassName,
shellStyle,
simulatorName,
streamBackend,
@@ -141,6 +143,7 @@ export function DeviceChrome({
onSimulatorInteraction={onSimulatorInteraction}
rotationQuarterTurns={rotationQuarterTurns}
simulatorName={simulatorName}
+ screenClassName={screenClassName}
streamBackend={streamBackend}
streamCanvasRef={streamCanvasRef}
streamCanvasKey={streamCanvasKey}
@@ -183,6 +186,7 @@ export function DeviceChrome({
onSimulatorInteraction={onSimulatorInteraction}
rotationQuarterTurns={rotationQuarterTurns}
simulatorName={simulatorName}
+ screenClassName={screenClassName}
streamBackend={streamBackend}
streamCanvasRef={streamCanvasRef}
streamCanvasKey={streamCanvasKey}
@@ -423,6 +427,7 @@ interface ScreenLayerProps {
onPickerSelect: (id: string) => void;
onSimulatorInteraction: () => void;
rotationQuarterTurns: number;
+ screenClassName?: string;
simulatorName: string;
streamBackend: string;
streamCanvasRef: Ref;
@@ -451,6 +456,7 @@ function ScreenLayer({
onPickerSelect,
onSimulatorInteraction,
rotationQuarterTurns,
+ screenClassName,
simulatorName,
streamBackend,
streamCanvasRef,
@@ -462,7 +468,13 @@ function ScreenLayer({
}: ScreenLayerProps) {
return (
{
onSimulatorInteraction();
diff --git a/client/src/features/viewport/SimulatorViewport.tsx b/client/src/features/viewport/SimulatorViewport.tsx
index f4ebe360..000f75ff 100644
--- a/client/src/features/viewport/SimulatorViewport.tsx
+++ b/client/src/features/viewport/SimulatorViewport.tsx
@@ -57,6 +57,7 @@ interface SimulatorViewportProps {
outerCanvasRef: Ref
;
rotationQuarterTurns: number;
screenAspect: string;
+ screenClassName?: string;
selectedSimulator: SimulatorMetadata | null;
shellStyle: CSSProperties | null;
streamBackend: string;
@@ -114,6 +115,7 @@ export function SimulatorViewport({
outerCanvasRef,
rotationQuarterTurns,
screenAspect,
+ screenClassName,
selectedSimulator,
shellStyle,
streamBackend,
@@ -200,6 +202,7 @@ export function SimulatorViewport({
onStartPanning={onStartPanning}
rotationQuarterTurns={rotationQuarterTurns}
screenAspect={screenAspect}
+ screenClassName={screenClassName}
shellStyle={shellStyle}
simulatorName={selectedSimulator.name}
streamBackend={streamBackend}
diff --git a/client/src/styles/components.css b/client/src/styles/components.css
index df0b0127..724f3825 100644
--- a/client/src/styles/components.css
+++ b/client/src/styles/components.css
@@ -667,6 +667,12 @@
color: color-mix(in srgb, #d7ba7d 82%, var(--text));
}
+.hierarchy-source-pill.source-android-uiautomator {
+ border-color: color-mix(in srgb, #7fd97f 55%, var(--border));
+ background: color-mix(in srgb, #7fd97f 13%, transparent);
+ color: color-mix(in srgb, #7fd97f 82%, var(--text));
+}
+
.hierarchy-source-pill.active {
gap: 5px;
padding-inline: 7px 8px;
@@ -1601,6 +1607,11 @@
box-shadow: 0 0 0 1px var(--screen-bg);
}
+.device-screen.android-screen {
+ background: transparent;
+ border-radius: 10px;
+}
+
.stream-canvas {
position: absolute;
inset: 0;
diff --git a/server/src/android.rs b/server/src/android.rs
index 3e6d66e5..c4687476 100644
--- a/server/src/android.rs
+++ b/server/src/android.rs
@@ -1086,6 +1086,8 @@ fn android_node_value(node: roxmltree::Node<'_, '_>, depth: usize, max_depth: us
let text = node.attribute("text").unwrap_or("");
let content_desc = node.attribute("content-desc").unwrap_or("");
let label = if !text.is_empty() { text } else { content_desc };
+ let resource_id = node.attribute("resource-id").unwrap_or("");
+ let role = android_role(node, short_class);
let mut children = Vec::new();
if depth < max_depth {
for child in node.children().filter(|child| child.has_tag_name("node")) {
@@ -1094,15 +1096,27 @@ fn android_node_value(node: roxmltree::Node<'_, '_>, depth: usize, max_depth: us
}
json!({
"source": "android-uiautomator",
- "type": map_android_class(short_class),
- "role": map_android_class(short_class),
+ "type": android_type(short_class, class_name),
+ "role": role,
"className": class_name,
- "AXIdentifier": node.attribute("resource-id").unwrap_or(""),
+ "AXIdentifier": resource_id,
"AXLabel": label,
"AXValue": text,
+ "androidClass": class_name,
+ "androidPackage": node.attribute("package").unwrap_or(""),
+ "androidResourceId": resource_id,
+ "checkable": bool_attr(node, "checkable"),
+ "checked": bool_attr(node, "checked"),
+ "clickable": bool_attr(node, "clickable"),
+ "focusable": bool_attr(node, "focusable"),
+ "focused": bool_attr(node, "focused"),
+ "longClickable": bool_attr(node, "long-clickable"),
+ "password": bool_attr(node, "password"),
+ "scrollable": bool_attr(node, "scrollable"),
+ "selected": bool_attr(node, "selected"),
"text": text,
"title": label,
- "enabled": node.attribute("enabled") == Some("true"),
+ "enabled": bool_attr(node, "enabled"),
"isHidden": node.attribute("visible-to-user") == Some("false"),
"frame": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
"frameInScreen": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
@@ -1132,7 +1146,26 @@ fn frame_value(x: f64, y: f64, width: f64, height: f64) -> Value {
json!({ "x": x, "y": y, "width": width, "height": height })
}
-fn map_android_class(class_name: &str) -> &'static str {
+fn bool_attr(node: roxmltree::Node<'_, '_>, name: &str) -> bool {
+ node.attribute(name) == Some("true")
+}
+
+fn android_type(short_class: &str, class_name: &str) -> String {
+ let fallback = if short_class.is_empty() {
+ class_name
+ } else {
+ short_class
+ };
+ if fallback.is_empty() {
+ "View".to_owned()
+ } else {
+ fallback.to_owned()
+ }
+}
+
+fn android_role(node: roxmltree::Node<'_, '_>, class_name: &str) -> &'static str {
+ let clickable = bool_attr(node, "clickable");
+ let scrollable = bool_attr(node, "scrollable");
match class_name {
"Button" | "ImageButton" | "FloatingActionButton" => "button",
"EditText" => "textField",
@@ -1142,10 +1175,18 @@ fn map_android_class(class_name: &str) -> &'static str {
"RadioButton" => "radioButton",
"Switch" | "ToggleButton" => "switch",
"SeekBar" => "slider",
- "RecyclerView" | "ListView" => "table",
- "ScrollView" | "HorizontalScrollView" | "NestedScrollView" => "scrollView",
+ "RecyclerView" | "ListView" | "GridView" => "collection",
+ "ScrollView" | "HorizontalScrollView" | "NestedScrollView" | "ViewPager" => "scrollView",
"WebView" => "webView",
- _ => "other",
+ "ProgressBar" => "progressIndicator",
+ "Spinner" => "popUpButton",
+ "TabWidget" | "TabLayout" => "tabGroup",
+ "Toolbar" | "ActionBar" => "toolbar",
+ "ViewGroup" | "FrameLayout" | "LinearLayout" | "RelativeLayout" | "ConstraintLayout"
+ | "CoordinatorLayout" | "DrawerLayout" => "container",
+ _ if scrollable => "scrollView",
+ _ if clickable => "button",
+ _ => "view",
}
}
@@ -1180,6 +1221,41 @@ fn shell_quote(value: &str) -> String {
format!("'{}'", value.replace('\'', "'\\''"))
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn android_nodes_keep_class_type_and_semantic_role() {
+ let document = roxmltree::Document::parse(
+ r#" "#,
+ )
+ .unwrap();
+
+ let value = android_node_value(document.root_element(), 0, 10);
+
+ assert_eq!(value["type"], "ViewGroup");
+ assert_eq!(value["role"], "container");
+ assert_eq!(value["AXIdentifier"], "com.example:id/hotseat");
+ assert_eq!(value["androidClass"], "android.view.ViewGroup");
+ assert_eq!(value["androidResourceId"], "com.example:id/hotseat");
+ assert_eq!(value["enabled"], true);
+ }
+
+ #[test]
+ fn clickable_unknown_android_nodes_are_buttons() {
+ let document = roxmltree::Document::parse(
+ r#" "#,
+ )
+ .unwrap();
+
+ let value = android_node_value(document.root_element(), 0, 10);
+
+ assert_eq!(value["type"], "CustomTile");
+ assert_eq!(value["role"], "button");
+ }
+}
+
#[allow(dead_code)]
fn _dedupe(values: impl IntoIterator- ) -> Vec
{
let mut seen = HashSet::new();
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index ccae059b..2d4d3c29 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -58,7 +58,7 @@ const WEBRTC_FULL_ICE_GATHER_TIMEOUT: Duration = Duration::from_secs(3);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
-const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 1280;
+const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 960;
const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
@@ -1096,7 +1096,7 @@ impl AndroidWebRtcSource {
) -> Result {
let mut frame_stream = bridge.grpc_frame_stream(&udid, Some(max_edge)).await?;
let (sender, _) = broadcast::channel(ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY);
- let (shutdown_tx, mut shutdown_rx) = broadcast::channel(1);
+ let (shutdown_tx, _) = broadcast::channel(1);
let inner = Arc::new(AndroidWebRtcSourceInner {
udid: udid.clone(),
encoder_handle: AtomicUsize::new(0),
@@ -1130,34 +1130,52 @@ impl AndroidWebRtcSource {
.store(user_data as usize, Ordering::Release);
let source = Self { inner };
- let task_inner = Arc::downgrade(&source.inner);
+ let latest_frame = Arc::new(Mutex::new(None::));
+ let reader_inner = Arc::downgrade(&source.inner);
+ let reader_latest_frame = latest_frame.clone();
+ let mut reader_shutdown_rx = source.inner.shutdown_tx.subscribe();
tokio::spawn(async move {
- let min_frame_gap = android_webrtc_frame_interval();
- let mut last_encoded_at = Instant::now() - min_frame_gap;
loop {
tokio::select! {
- _ = shutdown_rx.recv() => break,
+ _ = reader_shutdown_rx.recv() => break,
frame = frame_stream.next_frame() => {
- let frame = match frame {
- Ok(Some(frame)) => frame,
+ match frame {
+ Ok(Some(frame)) => {
+ *reader_latest_frame.lock().unwrap() = Some(frame);
+ }
Ok(None) => break,
Err(error) => {
- let udid = task_inner
+ let udid = reader_inner
.upgrade()
.map(|inner| inner.udid.clone())
.unwrap_or_else(|| "android".to_owned());
warn!("Android WebRTC raw frame stream failed for {udid}: {error}");
break;
}
- };
- let Some(inner) = task_inner.upgrade() else {
+ }
+ }
+ }
+ }
+ });
+
+ let encoder_inner = Arc::downgrade(&source.inner);
+ let encoder_latest_frame = latest_frame;
+ let mut encoder_shutdown_rx = source.inner.shutdown_tx.subscribe();
+ tokio::spawn(async move {
+ let min_frame_gap = android_webrtc_frame_interval();
+ let mut ticker = time::interval(min_frame_gap);
+ ticker.set_missed_tick_behavior(time::MissedTickBehavior::Skip);
+ loop {
+ tokio::select! {
+ _ = encoder_shutdown_rx.recv() => break,
+ _ = ticker.tick() => {
+ let Some(inner) = encoder_inner.upgrade() else {
break;
};
- let now = Instant::now();
- if now.duration_since(last_encoded_at) < min_frame_gap {
+ let frame = encoder_latest_frame.lock().unwrap().take();
+ let Some(frame) = frame else {
continue;
- }
- last_encoded_at = now;
+ };
let handle = inner.encoder_handle.load(Ordering::Acquire);
let udid = inner.udid.clone();
let encode_result = task::spawn_blocking(move || {
@@ -1358,11 +1376,16 @@ unsafe fn take_native_error(raw: *mut i8) -> Option {
}
fn android_webrtc_max_edge() -> u32 {
- std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
+ let android_cap = std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
.ok()
.and_then(|value| value.parse::().ok())
.unwrap_or(DEFAULT_ANDROID_WEBRTC_MAX_EDGE)
- .clamp(360, 2400)
+ .clamp(360, 2400);
+ std::env::var("SIMDECK_REALTIME_MAX_EDGE")
+ .ok()
+ .and_then(|value| value.parse::().ok())
+ .map(|value| value.clamp(360, 2400).min(android_cap))
+ .unwrap_or(android_cap)
}
fn android_webrtc_frame_interval() -> Duration {
From 7e57db2cd92733cc681acf6274dacd373ed93185 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 15:36:08 -0400
Subject: [PATCH 06/29] Fix Android WebRTC stream startup
---
client/src/features/stream/streamTypes.ts | 1 +
.../stream/streamWorkerClient.test.ts | 28 +++++++++++++++++++
.../src/features/stream/streamWorkerClient.ts | 21 ++++++++++++--
client/src/features/stream/useLiveStream.ts | 3 ++
server/src/transport/webrtc.rs | 26 +++++++++++------
5 files changed, 68 insertions(+), 11 deletions(-)
create mode 100644 client/src/features/stream/streamWorkerClient.test.ts
diff --git a/client/src/features/stream/streamTypes.ts b/client/src/features/stream/streamTypes.ts
index f55eb58c..4e9b6099 100644
--- a/client/src/features/stream/streamTypes.ts
+++ b/client/src/features/stream/streamTypes.ts
@@ -2,6 +2,7 @@ import type { Size } from "../viewport/types";
export interface StreamConnectTarget {
clientId?: string;
+ platform?: string;
remote?: boolean;
streamConfig?: StreamConfig;
transport?: StreamTransport;
diff --git a/client/src/features/stream/streamWorkerClient.test.ts b/client/src/features/stream/streamWorkerClient.test.ts
new file mode 100644
index 00000000..dc079f4b
--- /dev/null
+++ b/client/src/features/stream/streamWorkerClient.test.ts
@@ -0,0 +1,28 @@
+import { describe, expect, it } from "vitest";
+
+import {
+ buildStreamTarget,
+ initialStreamBackend,
+ preferredStreamBackend,
+} from "./streamWorkerClient";
+
+describe("streamWorkerClient", () => {
+ it("forces Android emulator streams onto WebRTC even when H264 is requested", () => {
+ const target = buildStreamTarget("android:emulator-5554", {
+ platform: "android-emulator",
+ transport: "h264",
+ });
+
+ expect(preferredStreamBackend(target)).toBe("webrtc");
+ expect(initialStreamBackend(target)).toBe("webrtc");
+ });
+
+ it("treats Android UDID prefixes as WebRTC-only stream targets", () => {
+ const target = buildStreamTarget("android:Pixel_8", {
+ transport: "h264",
+ });
+
+ expect(preferredStreamBackend(target)).toBe("webrtc");
+ expect(initialStreamBackend(target)).toBe("webrtc");
+ });
+});
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index e5397753..2212fdf2 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -224,6 +224,7 @@ export function buildStreamTarget(
udid: string,
options: {
clientId?: string;
+ platform?: string;
remote?: boolean;
streamConfig?: StreamConfig;
transport?: StreamTransport;
@@ -231,6 +232,7 @@ export function buildStreamTarget(
): StreamConnectTarget {
return {
clientId: options.clientId,
+ platform: options.platform,
remote: options.remote,
streamConfig: options.streamConfig,
transport: options.transport,
@@ -2725,9 +2727,12 @@ export class StreamWorkerClient {
};
}
-function preferredStreamBackend(
+export function preferredStreamBackend(
target?: StreamConnectTarget | null,
): "auto" | StreamBackend {
+ if (isAndroidStreamTarget(target)) {
+ return "webrtc";
+ }
const value =
target?.transport ??
new URLSearchParams(window.location.search).get("stream");
@@ -2737,7 +2742,12 @@ function preferredStreamBackend(
return value === "webrtc" ? "webrtc" : "auto";
}
-function initialStreamBackend(target: StreamConnectTarget): StreamBackend {
+export function initialStreamBackend(
+ target: StreamConnectTarget,
+): StreamBackend {
+ if (isAndroidStreamTarget(target)) {
+ return "webrtc";
+ }
const preferredBackend = preferredStreamBackend(target);
if (preferredBackend === "h264-ws") {
return canUseH264WebSocket() ? "h264-ws" : "webrtc";
@@ -2756,3 +2766,10 @@ function nextAutoFallbackBackend(
}
return null;
}
+
+function isAndroidStreamTarget(target?: StreamConnectTarget | null): boolean {
+ return (
+ target?.platform === "android-emulator" ||
+ Boolean(target?.udid.startsWith("android:"))
+ );
+}
diff --git a/client/src/features/stream/useLiveStream.ts b/client/src/features/stream/useLiveStream.ts
index 55f40118..60b980a2 100644
--- a/client/src/features/stream/useLiveStream.ts
+++ b/client/src/features/stream/useLiveStream.ts
@@ -301,6 +301,7 @@ export function useLiveStream({
const targetKey = [
simulator.udid,
+ simulator.platform ?? "",
remote ? "remote" : "local",
streamTransport,
].join("|");
@@ -316,6 +317,7 @@ export function useLiveStream({
workerClient.connect(
buildStreamTarget(simulator.udid, {
clientId: clientTelemetryIdRef.current,
+ platform: simulator.platform,
remote,
streamConfig,
transport: streamTransport,
@@ -324,6 +326,7 @@ export function useLiveStream({
}, [
canvasElement,
simulator?.isBooted,
+ simulator?.platform,
simulator?.udid,
paused,
remote,
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index 2d4d3c29..895b2b56 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -1176,6 +1176,9 @@ impl AndroidWebRtcSource {
let Some(frame) = frame else {
continue;
};
+ if inner.latest_keyframe.read().unwrap().is_none() {
+ inner.request_keyframe();
+ }
let handle = inner.encoder_handle.load(Ordering::Acquire);
let udid = inner.udid.clone();
let encode_result = task::spawn_blocking(move || {
@@ -1233,15 +1236,7 @@ impl AndroidWebRtcSource {
fn request_refresh(&self) {}
fn request_keyframe(&self) {
- self.inner
- .metrics
- .keyframe_requests
- .fetch_add(1, Ordering::Relaxed);
- unsafe {
- ffi::xcw_native_h264_encoder_request_keyframe(
- self.inner.encoder_handle.load(Ordering::Acquire) as *mut c_void,
- );
- }
+ self.inner.request_keyframe();
}
}
@@ -1279,6 +1274,19 @@ unsafe extern "C" fn android_h264_encoder_frame_callback(
}
impl AndroidWebRtcSourceInner {
+ fn request_keyframe(&self) {
+ self.metrics
+ .keyframe_requests
+ .fetch_add(1, Ordering::Relaxed);
+ let encoder_handle = self.encoder_handle.load(Ordering::Acquire);
+ if encoder_handle == 0 {
+ return;
+ }
+ unsafe {
+ ffi::xcw_native_h264_encoder_request_keyframe(encoder_handle as *mut c_void);
+ }
+ }
+
fn handle_encoded_frame(&self, frame: &ffi::xcw_native_frame) {
let description = unsafe { copy_native_shared_bytes(frame.description) };
let Some(data) = (unsafe { copy_native_shared_bytes(frame.data) }) else {
From 22d14ce7393eaa758f6ade899ccddacd617c1937 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 15:52:19 -0400
Subject: [PATCH 07/29] Stabilize Android WebRTC readiness
---
.../src/features/stream/streamWorkerClient.ts | 30 +++++++++++++++++--
server/src/transport/webrtc.rs | 14 +++++++++
2 files changed, 41 insertions(+), 3 deletions(-)
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index 2212fdf2..4af8c856 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -353,6 +353,13 @@ interface WebCodecsVideoDecoderConstructor {
}>;
}
+interface WebRtcAnswerPayload extends RTCSessionDescriptionInit {
+ video?: {
+ height?: number;
+ width?: number;
+ };
+}
+
interface PendingVideoFrame {
frame: WebCodecsVideoFrame;
sequence: number | null;
@@ -1532,7 +1539,7 @@ class WebRtcStreamClient implements StreamClientBackend {
target,
localDescription,
);
- const answer = (await response.json()) as RTCSessionDescriptionInit;
+ const answer = (await response.json()) as WebRtcAnswerPayload;
if (generation !== this.connectGeneration) {
return;
}
@@ -1541,6 +1548,15 @@ class WebRtcStreamClient implements StreamClientBackend {
);
this.postDiagnostics(target, `${options.detailPrefix}-answer`);
await peerConnection.setRemoteDescription(answer);
+ if (
+ typeof answer.video?.width === "number" &&
+ typeof answer.video?.height === "number" &&
+ answer.video.width > 0 &&
+ answer.video.height > 0
+ ) {
+ this.syncCanvasSize(answer.video.width, answer.video.height);
+ this.reportVideoConfig(answer.video.width, answer.video.height);
+ }
}
destroy() {
@@ -1687,10 +1703,14 @@ class WebRtcStreamClient implements StreamClientBackend {
return;
}
const now = performance.now();
- const hasRenderedFrame = this.stats.renderedFrames > 0;
+ const hasMediaProgress =
+ this.hasRenderedFrame ||
+ this.stats.renderedFrames > 0 ||
+ this.stats.decodedFrames > 0 ||
+ this.stats.receivedPackets > 0;
const frameAgeMs =
this.lastVideoFrameAt > 0 ? now - this.lastVideoFrameAt : Infinity;
- if (!hasRenderedFrame) {
+ if (!hasMediaProgress) {
this.handleConnectionError(
target,
generation,
@@ -1699,6 +1719,10 @@ class WebRtcStreamClient implements StreamClientBackend {
);
return;
}
+ if (!this.hasRenderedFrame) {
+ this.scheduleFrameWatchdog(target, generation);
+ return;
+ }
if (frameAgeMs > WEBRTC_STALLED_FRAME_TIMEOUT_MS) {
this.sendControl({ snapshot: true, type: "streamControl" });
this.scheduleFrameWatchdog(target, generation);
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index 895b2b56..c5361d9d 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -89,6 +89,14 @@ pub struct WebRtcAnswerPayload {
pub sdp: String,
#[serde(rename = "type")]
pub kind: String,
+ pub video: WebRtcVideoMetadata,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WebRtcVideoMetadata {
+ pub width: u32,
+ pub height: u32,
}
#[derive(Debug, Clone, Serialize)]
@@ -289,6 +297,8 @@ pub async fn create_answer(
summarize_sdp_candidate_types(&local_description.sdp)
);
+ let first_frame_width = first_frame.width;
+ let first_frame_height = first_frame.height;
let (cancellation_token, cancellation) =
register_webrtc_media_stream(&udid, payload.client_id.as_deref(), true);
tokio::spawn(
@@ -309,6 +319,10 @@ pub async fn create_answer(
Ok(WebRtcAnswerPayload {
sdp: local_description.sdp,
kind: "answer".to_owned(),
+ video: WebRtcVideoMetadata {
+ width: first_frame_width,
+ height: first_frame_height,
+ },
})
}
From dcaa131b3a2860a0df925321286ed4d32edbc981 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 16:06:58 -0400
Subject: [PATCH 08/29] Use raw Android frame streaming
---
.../stream/streamWorkerClient.test.ts | 12 +-
.../src/features/stream/streamWorkerClient.ts | 408 +++++++++++++++++-
client/src/features/stream/useLiveStream.ts | 7 +-
server/src/android.rs | 11 +-
server/src/api/routes.rs | 6 +-
server/src/transport/webrtc.rs | 48 +--
6 files changed, 435 insertions(+), 57 deletions(-)
diff --git a/client/src/features/stream/streamWorkerClient.test.ts b/client/src/features/stream/streamWorkerClient.test.ts
index dc079f4b..9aac980a 100644
--- a/client/src/features/stream/streamWorkerClient.test.ts
+++ b/client/src/features/stream/streamWorkerClient.test.ts
@@ -7,22 +7,22 @@ import {
} from "./streamWorkerClient";
describe("streamWorkerClient", () => {
- it("forces Android emulator streams onto WebRTC even when H264 is requested", () => {
+ it("forces Android emulator streams onto the raw frame socket even when H264 is requested", () => {
const target = buildStreamTarget("android:emulator-5554", {
platform: "android-emulator",
transport: "h264",
});
- expect(preferredStreamBackend(target)).toBe("webrtc");
- expect(initialStreamBackend(target)).toBe("webrtc");
+ expect(preferredStreamBackend(target)).toBe("android-raw");
+ expect(initialStreamBackend(target)).toBe("android-raw");
});
- it("treats Android UDID prefixes as WebRTC-only stream targets", () => {
+ it("treats Android UDID prefixes as raw frame stream targets", () => {
const target = buildStreamTarget("android:Pixel_8", {
transport: "h264",
});
- expect(preferredStreamBackend(target)).toBe("webrtc");
- expect(initialStreamBackend(target)).toBe("webrtc");
+ expect(preferredStreamBackend(target)).toBe("android-raw");
+ expect(initialStreamBackend(target)).toBe("android-raw");
});
});
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index 4af8c856..05a6d3e0 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -33,6 +33,9 @@ const H264_WS_HEADER_BYTES = 40;
const H264_WS_MAGIC = 0x53444831;
const H264_WS_FLAG_KEYFRAME = 1 << 0;
const H264_WS_FLAG_CONFIG = 1 << 1;
+const ANDROID_RAW_HEADER_BYTES = 32;
+const ANDROID_RAW_MAGIC = 0x53444146;
+const ANDROID_RAW_FPS = 30;
const H264_WS_LOCAL_AUTO_PROFILES: StreamQualityPreset[] = [
"low",
"economy",
@@ -58,9 +61,10 @@ let activeWebRtcControlChannel: RTCDataChannel | null = null;
let activeWebRtcTelemetryChannel: RTCDataChannel | null = null;
let activeInputSocket: WebSocket | null = null;
let activeH264StreamSocket: WebSocket | null = null;
+let activeAndroidFrameSocket: WebSocket | null = null;
let activeStreamClient: StreamWorkerClient | null = null;
-export type StreamBackend = "h264-ws" | "webrtc";
+export type StreamBackend = "android-raw" | "h264-ws" | "webrtc";
export function sendWebRtcControlMessage(
encoded: string,
@@ -76,7 +80,8 @@ export function sendStreamClientStats(stats: unknown): boolean {
const encoded = JSON.stringify({ stats, type: "clientStats" });
return (
sendDataChannelMessage(activeWebRtcTelemetryChannel, encoded) ||
- sendWebSocketMessage(activeH264StreamSocket, encoded)
+ sendWebSocketMessage(activeH264StreamSocket, encoded) ||
+ sendWebSocketMessage(activeAndroidFrameSocket, encoded)
);
}
@@ -365,6 +370,14 @@ interface PendingVideoFrame {
sequence: number | null;
}
+interface AndroidRawFrame {
+ height: number;
+ pixels: Uint8ClampedArray;
+ sequence: number;
+ timestampUs: number;
+ width: number;
+}
+
function webCodecsConstructors(): {
EncodedVideoChunk?: WebCodecsEncodedVideoChunkConstructor;
VideoDecoder?: WebCodecsVideoDecoderConstructor;
@@ -1114,6 +1127,382 @@ class H264WebSocketStreamClient implements StreamClientBackend {
}
}
+class AndroidRawFrameStreamClient implements StreamClientBackend {
+ private canvas: HTMLCanvasElement | null = null;
+ private canvasContext: CanvasRenderingContext2D | null = null;
+ private connectGeneration = 0;
+ private frameWatchdogTimeout = 0;
+ private inputSocket: WebSocket | null = null;
+ private lastFrameAt = 0;
+ private reportedVideoHeight = 0;
+ private reportedVideoWidth = 0;
+ private shouldReconnect = false;
+ private stats: StreamStats = createEmptyStreamStats();
+ private streamSocket: WebSocket | null = null;
+ private streamTarget: StreamConnectTarget | null = null;
+ private stalledFrameWatchdogCount = 0;
+ private streamingReported = false;
+
+ constructor(
+ private readonly onMessage: (message: WorkerToMainMessage) => void,
+ ) {}
+
+ attachCanvas(canvasElement: HTMLCanvasElement) {
+ this.canvas = canvasElement;
+ this.canvasContext = canvasElement.getContext("2d", {
+ alpha: false,
+ desynchronized: true,
+ });
+ }
+
+ connect(target: StreamConnectTarget) {
+ this.disconnect();
+ if (!this.canvas) {
+ return;
+ }
+ const generation = ++this.connectGeneration;
+ this.shouldReconnect = true;
+ this.streamTarget = target;
+ this.streamingReported = false;
+ this.lastFrameAt = 0;
+ this.reportedVideoHeight = 0;
+ this.reportedVideoWidth = 0;
+ this.stalledFrameWatchdogCount = 0;
+ this.stats = createEmptyStreamStats();
+ this.stats.codec = "android-raw";
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Opening Android raw frame stream",
+ state: "connecting",
+ },
+ });
+
+ const socket = new WebSocket(
+ webSocketApiUrl(
+ `/api/simulators/${encodeURIComponent(target.udid)}/android/frames?max_fps=${ANDROID_RAW_FPS}`,
+ ),
+ );
+ socket.binaryType = "arraybuffer";
+ this.streamSocket = socket;
+ socket.addEventListener("open", () => {
+ if (socket === this.streamSocket) {
+ socket.binaryType = "arraybuffer";
+ activeAndroidFrameSocket = socket;
+ }
+ });
+ socket.addEventListener("message", (event) => {
+ if (socket !== this.streamSocket) {
+ return;
+ }
+ if (typeof event.data === "string") {
+ this.handleTextMessage(event.data);
+ return;
+ }
+ if (hasArrayBufferMethod(event.data)) {
+ void event.data.arrayBuffer().then((buffer) => {
+ if (socket === this.streamSocket) {
+ this.handleFrameMessage(buffer);
+ }
+ });
+ return;
+ }
+ this.handleFrameMessage(event.data);
+ });
+ socket.addEventListener("close", () => {
+ if (activeAndroidFrameSocket === socket) {
+ activeAndroidFrameSocket = null;
+ }
+ if (socket === this.streamSocket && this.shouldReconnect) {
+ this.handleError("Android raw frame stream closed.");
+ }
+ });
+ socket.addEventListener("error", () => {
+ if (socket === this.streamSocket) {
+ this.handleError("Android raw frame stream failed.");
+ }
+ });
+
+ this.connectInputSocket(target, generation);
+ this.scheduleFrameWatchdog(generation);
+ }
+
+ disconnect() {
+ this.shouldReconnect = false;
+ this.connectGeneration += 1;
+ this.clearFrameWatchdog();
+ this.streamSocket?.close();
+ if (activeAndroidFrameSocket === this.streamSocket) {
+ activeAndroidFrameSocket = null;
+ }
+ this.streamSocket = null;
+ this.inputSocket?.close();
+ if (activeInputSocket === this.inputSocket) {
+ activeInputSocket = null;
+ }
+ this.inputSocket = null;
+ this.streamTarget = null;
+ this.streamingReported = false;
+ this.lastFrameAt = 0;
+ this.reportedVideoHeight = 0;
+ this.reportedVideoWidth = 0;
+ this.stalledFrameWatchdogCount = 0;
+ }
+
+ destroy() {
+ this.disconnect();
+ }
+
+ clear() {
+ if (!this.canvas) {
+ return;
+ }
+ this.ensureCanvasContext()?.clearRect(
+ 0,
+ 0,
+ this.canvas.width,
+ this.canvas.height,
+ );
+ }
+
+ sendControl(payload: unknown): boolean {
+ if (
+ payload &&
+ typeof payload === "object" &&
+ "type" in payload &&
+ payload.type === "streamControl"
+ ) {
+ return true;
+ }
+ return sendWebSocketMessage(this.inputSocket, JSON.stringify(payload));
+ }
+
+ private connectInputSocket(target: StreamConnectTarget, generation: number) {
+ const socket = new WebSocket(
+ webSocketApiUrl(
+ `/api/simulators/${encodeURIComponent(target.udid)}/input`,
+ ),
+ );
+ this.inputSocket = socket;
+ activeInputSocket = socket;
+ socket.addEventListener("open", () => {
+ if (generation === this.connectGeneration) {
+ activeInputSocket = socket;
+ }
+ });
+ socket.addEventListener("close", () => {
+ if (activeInputSocket === socket) {
+ activeInputSocket = null;
+ }
+ });
+ socket.addEventListener("error", () => {
+ if (generation === this.connectGeneration) {
+ console.warn("Android input WebSocket failed.");
+ }
+ });
+ }
+
+ private handleTextMessage(text: string) {
+ try {
+ const message = JSON.parse(text) as { error?: string; type?: string };
+ if (message.error) {
+ this.handleError(message.error);
+ }
+ } catch {
+ // Text frames are diagnostics; binary frames carry pixels.
+ }
+ }
+
+ private handleFrameMessage(data: unknown) {
+ const frame = parseAndroidRawFrame(data);
+ if (!frame) {
+ this.stats.h264ParseFailures += 1;
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ return;
+ }
+ this.paintFrame(frame);
+ }
+
+ private paintFrame(frame: AndroidRawFrame) {
+ const canvas = this.canvas;
+ if (!canvas) {
+ return;
+ }
+ this.syncCanvasSize(frame.width, frame.height);
+ const startedAt = performance.now();
+ const image = new ImageData(frame.pixels, frame.width, frame.height);
+ this.ensureCanvasContext()?.putImageData(image, 0, 0);
+ const finishedAt = performance.now();
+ const previousFrameAt = this.lastFrameAt;
+ this.lastFrameAt = finishedAt;
+ this.stalledFrameWatchdogCount = 0;
+ this.reportVideoConfig(frame.width, frame.height);
+ this.stats.codec = "android-raw";
+ this.stats.decodedFrames += 1;
+ this.stats.renderedFrames += 1;
+ this.stats.receivedPackets += 1;
+ this.stats.frameSequence = frame.sequence;
+ this.stats.width = frame.width;
+ this.stats.height = frame.height;
+ this.stats.latestRenderMs = finishedAt - startedAt;
+ this.stats.maxRenderMs = Math.max(
+ this.stats.maxRenderMs,
+ this.stats.latestRenderMs,
+ );
+ this.stats.averageRenderMs =
+ this.stats.averageRenderMs <= 0
+ ? this.stats.latestRenderMs
+ : this.stats.averageRenderMs * 0.9 + this.stats.latestRenderMs * 0.1;
+ this.stats.latestFrameGapMs =
+ previousFrameAt > 0 ? finishedAt - previousFrameAt : 0;
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ if (!this.streamingReported) {
+ this.streamingReported = true;
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Android raw frame stream connected",
+ state: "streaming",
+ },
+ });
+ }
+ }
+
+ private ensureCanvasContext(): CanvasRenderingContext2D | null {
+ const canvas = this.canvas;
+ if (!canvas) {
+ this.canvasContext = null;
+ return null;
+ }
+ if (this.canvasContext?.canvas === canvas) {
+ return this.canvasContext;
+ }
+ this.canvasContext = canvas.getContext("2d", {
+ alpha: false,
+ desynchronized: true,
+ });
+ return this.canvasContext;
+ }
+
+ private syncCanvasSize(width: number, height: number) {
+ if (!this.canvas) {
+ return;
+ }
+ const nextWidth = Math.max(1, Math.round(width));
+ const nextHeight = Math.max(1, Math.round(height));
+ if (this.canvas.width !== nextWidth) {
+ this.canvas.width = nextWidth;
+ }
+ if (this.canvas.height !== nextHeight) {
+ this.canvas.height = nextHeight;
+ }
+ }
+
+ private reportVideoConfig(width: number, height: number) {
+ if (
+ this.reportedVideoWidth === width &&
+ this.reportedVideoHeight === height
+ ) {
+ return;
+ }
+ this.reportedVideoWidth = width;
+ this.reportedVideoHeight = height;
+ this.onMessage({ type: "video-config", size: { height, width } });
+ }
+
+ private scheduleFrameWatchdog(generation: number) {
+ this.clearFrameWatchdog();
+ this.frameWatchdogTimeout = window.setTimeout(
+ () => {
+ this.frameWatchdogTimeout = 0;
+ if (generation !== this.connectGeneration || !this.shouldReconnect) {
+ return;
+ }
+ if (this.lastFrameAt <= 0) {
+ this.handleError("Android raw frame stream did not render a frame.");
+ return;
+ }
+ const now = performance.now();
+ if (now - this.lastFrameAt > H264_WS_STALLED_FRAME_TIMEOUT_MS) {
+ this.stalledFrameWatchdogCount += 1;
+ if (this.stalledFrameWatchdogCount >= 2 && this.streamTarget) {
+ const target = this.streamTarget;
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Reconnecting stalled Android raw frame stream",
+ state: "connecting",
+ },
+ });
+ this.connect(target);
+ return;
+ }
+ } else {
+ this.stalledFrameWatchdogCount = 0;
+ }
+ this.scheduleFrameWatchdog(generation);
+ },
+ this.lastFrameAt > 0
+ ? H264_WS_STALLED_FRAME_TIMEOUT_MS
+ : H264_WS_FIRST_FRAME_TIMEOUT_MS,
+ );
+ }
+
+ private clearFrameWatchdog() {
+ if (!this.frameWatchdogTimeout) {
+ return;
+ }
+ window.clearTimeout(this.frameWatchdogTimeout);
+ this.frameWatchdogTimeout = 0;
+ }
+
+ private handleError(message: string) {
+ this.onMessage({
+ type: "status",
+ status: { error: message.replace(/\.$/, ""), state: "error" },
+ });
+ }
+}
+
+function parseAndroidRawFrame(data: unknown): AndroidRawFrame | null {
+ const bytes = bytesFromBinaryMessage(data);
+ if (!bytes || bytes.byteLength < ANDROID_RAW_HEADER_BYTES) {
+ return null;
+ }
+ const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+ if (
+ view.getUint32(0, false) !== ANDROID_RAW_MAGIC ||
+ view.getUint8(4) !== 1
+ ) {
+ return null;
+ }
+ const width = view.getUint32(8, true);
+ const height = view.getUint32(12, true);
+ const sequence = view.getUint32(16, true);
+ const timestampUs =
+ view.getUint32(24, true) + view.getUint32(28, true) * 4294967296;
+ const pixelBytes = width * height * 4;
+ if (
+ width <= 0 ||
+ height <= 0 ||
+ bytes.byteLength < ANDROID_RAW_HEADER_BYTES + pixelBytes
+ ) {
+ return null;
+ }
+ return {
+ height,
+ pixels: new Uint8ClampedArray(
+ bytes.buffer as ArrayBuffer,
+ bytes.byteOffset + ANDROID_RAW_HEADER_BYTES,
+ pixelBytes,
+ ),
+ sequence,
+ timestampUs,
+ width,
+ };
+}
+
function parseH264WebSocketFrame(data: unknown): H264WebSocketFrame | null {
const bytes = bytesFromBinaryMessage(data);
if (!bytes || bytes.byteLength < H264_WS_HEADER_BYTES) {
@@ -2713,10 +3102,13 @@ export class StreamWorkerClient {
return;
}
this.backend?.destroy();
- this.backend =
- kind === "h264-ws"
- ? new H264WebSocketStreamClient(this.handleBackendMessage)
- : new WebRtcStreamClient(this.handleBackendMessage);
+ if (kind === "android-raw") {
+ this.backend = new AndroidRawFrameStreamClient(this.handleBackendMessage);
+ } else if (kind === "h264-ws") {
+ this.backend = new H264WebSocketStreamClient(this.handleBackendMessage);
+ } else {
+ this.backend = new WebRtcStreamClient(this.handleBackendMessage);
+ }
this.backendKind = kind;
if (this.canvasElement) {
this.backend.attachCanvas(this.canvasElement);
@@ -2755,7 +3147,7 @@ export function preferredStreamBackend(
target?: StreamConnectTarget | null,
): "auto" | StreamBackend {
if (isAndroidStreamTarget(target)) {
- return "webrtc";
+ return "android-raw";
}
const value =
target?.transport ??
@@ -2770,7 +3162,7 @@ export function initialStreamBackend(
target: StreamConnectTarget,
): StreamBackend {
if (isAndroidStreamTarget(target)) {
- return "webrtc";
+ return "android-raw";
}
const preferredBackend = preferredStreamBackend(target);
if (preferredBackend === "h264-ws") {
diff --git a/client/src/features/stream/useLiveStream.ts b/client/src/features/stream/useLiveStream.ts
index 60b980a2..26d1e9a1 100644
--- a/client/src/features/stream/useLiveStream.ts
+++ b/client/src/features/stream/useLiveStream.ts
@@ -437,7 +437,12 @@ export function useLiveStream({
runtimeInfo,
stats,
status,
- streamBackend: stats.codec === "h264-ws" ? "h264-ws" : "webrtc",
+ streamBackend:
+ stats.codec === "android-raw"
+ ? "android-raw"
+ : stats.codec === "h264-ws"
+ ? "h264-ws"
+ : "webrtc",
streamCanvasKey: `stream-${streamCanvasRevision}`,
};
}
diff --git a/server/src/android.rs b/server/src/android.rs
index c4687476..e83289a9 100644
--- a/server/src/android.rs
+++ b/server/src/android.rs
@@ -16,7 +16,7 @@ use tonic::transport::{Channel, Endpoint};
const ANDROID_ID_PREFIX: &str = "android:";
const DEFAULT_GRPC_PORT_BASE: u16 = 8554;
-const DEFAULT_ANDROID_STREAM_MAX_EDGE: u32 = 960;
+const ANDROID_GRPC_FRAME_MESSAGE_LIMIT: usize = 64 * 1024 * 1024;
const ANDROID_TOUCH_IDENTIFIER: i32 = 1;
const RUNNING_EMULATOR_CACHE_TTL: Duration = Duration::from_secs(2);
const AVD_GRPC_PORT_CACHE_TTL: Duration = Duration::from_secs(60);
@@ -460,11 +460,9 @@ impl AndroidBridge {
display: 0,
transport: None,
};
- if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if let (Some(max_edge), Ok(serial)) = (max_edge, self.resolve_serial(&avd_name)) {
if let Ok((width, height)) = self.screen_size_for_serial(&serial) {
- let max_edge = max_edge
- .unwrap_or(DEFAULT_ANDROID_STREAM_MAX_EDGE)
- .clamp(240, 2400) as f64;
+ let max_edge = max_edge.clamp(240, 2400) as f64;
let largest = width.max(height);
if largest > max_edge {
let scale = max_edge / largest;
@@ -483,7 +481,8 @@ impl AndroidBridge {
"Unable to connect to Android emulator gRPC: {error}"
))
})?;
- let mut grpc = tonic::client::Grpc::new(endpoint);
+ let mut grpc = tonic::client::Grpc::new(endpoint)
+ .max_decoding_message_size(ANDROID_GRPC_FRAME_MESSAGE_LIMIT);
grpc.ready().await.map_err(|error| {
AppError::native(format!("Android emulator gRPC is not ready: {error}"))
})?;
diff --git a/server/src/api/routes.rs b/server/src/api/routes.rs
index 72e30aa0..fbe4d177 100644
--- a/server/src/api/routes.rs
+++ b/server/src/api/routes.rs
@@ -1869,10 +1869,8 @@ async fn handle_android_frame_socket(
))
.await;
- let min_frame_gap = max_fps
- .filter(|fps| *fps > 0)
- .map(|fps| Duration::from_millis(1000 / u64::from(fps.min(60))))
- .unwrap_or_else(|| Duration::from_millis(83));
+ let fps = max_fps.unwrap_or(30).clamp(1, 30);
+ let min_frame_gap = Duration::from_micros(1_000_000 / u64::from(fps));
let mut last_sent_at = Instant::now() - min_frame_gap;
loop {
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index c5361d9d..2d3488ea 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -58,9 +58,7 @@ const WEBRTC_FULL_ICE_GATHER_TIMEOUT: Duration = Duration::from_secs(3);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
-const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 960;
-const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
-const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
+const ANDROID_WEBRTC_FPS: u64 = 30;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
OnceLock::new();
const MAX_WEBRTC_MEDIA_STREAMS_PER_UDID: usize = 16;
@@ -124,11 +122,14 @@ pub async fn create_answer(
"WebRTC preview supports media tracks only.",
));
}
- if let Some(stream_config) = payload.stream_config.as_ref() {
- apply_stream_quality_payload(&state, stream_config)?;
+ let is_android = android::is_android_id(&udid);
+ if !is_android {
+ if let Some(stream_config) = payload.stream_config.as_ref() {
+ apply_stream_quality_payload(&state, stream_config)?;
+ }
}
- let source = if android::is_android_id(&udid) {
+ let source = if is_android {
WebRtcVideoSource::Android(
AndroidWebRtcSource::start(
state.android.clone(),
@@ -593,13 +594,8 @@ fn attach_android_data_channel(
let _ = stream_control_tx.send(command);
}
WebRtcDataChannelMessage::StreamQuality { config } => {
- if let Err(error) = apply_stream_quality_payload(&state, &config) {
- warn!(
- "Android WebRTC stream quality update failed for {udid}: {error}"
- );
- } else {
- source.request_keyframe();
- }
+ let _ = config;
+ source.request_keyframe();
}
}
return;
@@ -1106,9 +1102,9 @@ impl AndroidWebRtcSource {
bridge: android::AndroidBridge,
metrics: Arc,
udid: String,
- max_edge: u32,
+ max_edge: Option,
) -> Result