This commit is contained in:
Anonymous
2026-03-16 16:37:59 +01:00
commit 042a5c83ff
27 changed files with 4862 additions and 0 deletions
+21
View File
@@ -0,0 +1,21 @@
[build]
target = "xtensa-esp32s3-espidf"
[target.xtensa-esp32s3-espidf]
linker = "ldproxy"
runner = "espflash flash --monitor"
rustflags = ["--cfg", "espidf_time64"]
[unstable]
build-std = ["std", "panic_abort"]
[env]
MCU="esp32s3"
# Must match the ESP-IDF version in the build container (docker.io/espressif/idf-rust:all_latest)
# The container ships v5.5.1 — use that, not v5.2.1, to avoid sdkconfig mismatch.
ESP_IDF_VERSION = { value = "v5.5.1" }
BINDGEN_EXTRA_CLANG_ARGS = { value = "-isystem/project/.embuild/espressif/tools/xtensa-esp-elf/esp-13.2.0_20230928/xtensa-esp-elf/lib/gcc/xtensa-esp-elf/13.2.0/include", force = false }
[esp-idf-sys]
esp_idf_version = "v5.5.1"
esp_idf_tools_install_dir = "global"
+7
View File
@@ -0,0 +1,7 @@
# Build artifacts and generated files
target/
Cargo.lock
.embuild/
sdkconfig
sdkconfig.old
build/
+36
View File
@@ -0,0 +1,36 @@
[package]
name = "esp32-android-auto-nav"
description = "Android Auto navigation-only head unit for ESP32-S3 (WT32-SC01 Plus)"
version = "0.1.0"
edition = "2021"
resolver = "2"
rust-version = "1.82"
license = "LGPL-3.0-or-later"
[dependencies]
# ESP-IDF bindings (versions matched to ESP-IDF v5.5.1 in idf-rust:all_latest container)
esp-idf-svc = { version = "0.52", features = ["alloc"] }
esp-idf-hal = "0.46"
esp-idf-sys = { version = "0.37", features = ["binstart"] }
embedded-svc = "0.29"
# Serialization / Protocol
protobuf = "3.7"
serde = { version = "1.0", features = ["derive"] }
# Error handling
anyhow = "1.0"
# Logging
log = "0.4"
# Bitfield for frame headers (reused from upstream)
bitfield = "0.19"
[build-dependencies]
embuild = "0.33"
protobuf-codegen = "3.7"
# Remote ESP-IDF components (managed by idf component manager)
[[package.metadata.esp-idf-sys.extra_components]]
remote_component = { name = "espressif/mdns", version = "1.4" }
+16
View File
@@ -0,0 +1,16 @@
/// Cargo build script: wires up the ESP-IDF build environment and generates
/// Rust bindings for the Android Auto protobuf schemas.
fn main() {
    // Emit the cfg flags / link arguments gathered by embuild so that
    // esp-idf-sys and friends build against the correct SDK.
    embuild::espidf::sysenv::output();

    // Generate protobuf Rust bindings.
    // OUT_DIR is always set when cargo invokes a build script; fail with a
    // clear message (rather than a bare unwrap panic) if run by hand.
    let out_dir_env = std::env::var_os("OUT_DIR")
        .expect("OUT_DIR not set — build.rs must be invoked by cargo");
    let out_dir = std::path::Path::new(&out_dir_env);
    protobuf_codegen::Codegen::new()
        .out_dir(out_dir)
        // Pure-Rust codegen: no external `protoc` binary needed in the container.
        .pure()
        .includes(["protobuf"])
        .input("protobuf/Bluetooth.proto")
        .input("protobuf/Wifi.proto")
        // NOTE(review): cargo_out_dir overrides out_dir with $OUT_DIR/protobuf;
        // the explicit .out_dir() call above appears redundant — confirm before removing.
        .cargo_out_dir("protobuf")
        .run_from_script();
}
Executable
+109
View File
@@ -0,0 +1,109 @@
#!/bin/bash
# Build script for ESP32 Android Auto Navigation Head Unit
# Uses Podman container with ESP-IDF Rust toolchain
#
# Usage: ./build.sh [-b|--build-only|--no-flash] [-h|--help]
set -e  # Exit on error

BINARY_NAME="esp32-android-auto-nav"

# Parse arguments.
# (Note: `shift` is meaningless inside `for arg in "$@"` — the list is fixed
# at loop entry — so plain flag matching is all that is needed here.)
BUILD_ONLY=false
for arg in "$@"; do
    case $arg in
        -b|--build-only|--no-flash)
            BUILD_ONLY=true
            ;;
        -h|--help)
            echo "Usage: ./build.sh [OPTIONS]"
            echo "Options:"
            echo "  -b, --build-only, --no-flash   Build only, skip flashing prompt"
            echo "  -h, --help                     Show this help message"
            exit 0
            ;;
    esac
done

echo "🔨 Building $BINARY_NAME (release)..."
echo ""
# The Rust/ESP toolchain env lives inside the container image.
# "$(pwd)" is quoted so the bind mount survives paths containing spaces.
sudo podman run --rm \
    --security-opt seccomp=unconfined \
    --net=host \
    --security-opt label=disable \
    --user root \
    -v "$(pwd)":/project \
    -w /project \
    docker.io/espressif/idf-rust:all_latest \
    bash -c "export RUSTUP_HOME=/home/esp/.rustup && export CARGO_HOME=/home/esp/.cargo && source /home/esp/export-esp.sh && cargo build --release"
echo ""
echo "✅ Build complete!"
echo "📦 Binary: target/xtensa-esp32s3-espidf/release/$BINARY_NAME"
echo ""

if [ "$BUILD_ONLY" = true ]; then
    echo "✅ Build complete (skipping flash prompt)."
    echo ""
    echo "To flash later:"
    echo "  cargo espflash flash target/xtensa-esp32s3-espidf/release/$BINARY_NAME --monitor"
    exit 0
fi

# Ask if user wants to flash
read -p "⚡ Flash to device now? (y/N): " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
    # Detect USB serial device (USB-UART bridges show up as ttyUSB*, native
    # USB CDC as ttyACM*)
    USB_DEVICE=""
    if [ -e /dev/ttyUSB0 ]; then
        USB_DEVICE="/dev/ttyUSB0"
    elif [ -e /dev/ttyACM0 ]; then
        USB_DEVICE="/dev/ttyACM0"
    else
        echo "🔍 No USB device auto-detected."
        read -p "Enter device path (e.g., /dev/ttyUSB0): " USB_DEVICE
    fi
    if [ -z "$USB_DEVICE" ] || [ ! -e "$USB_DEVICE" ]; then
        echo "❌ Device $USB_DEVICE not found. Skipping flash."
        exit 1
    fi
    # Fix USB device permissions so the container can open the port
    echo "🔧 Setting device permissions..."
    sudo chmod 666 "$USB_DEVICE"
    echo "📡 Flashing to $USB_DEVICE..."
    echo ""
    sudo podman run --rm \
        --security-opt seccomp=unconfined \
        --security-opt label=disable \
        --user root \
        --privileged \
        --device="$USB_DEVICE:$USB_DEVICE" \
        -v "$(pwd)":/project \
        -w /project \
        docker.io/espressif/idf-rust:all_latest \
        bash -c "export RUSTUP_HOME=/home/esp/.rustup && export CARGO_HOME=/home/esp/.cargo && source /home/esp/export-esp.sh && espflash flash --port $USB_DEVICE target/xtensa-esp32s3-espidf/release/$BINARY_NAME"
    echo ""
    echo "✅ Flash complete!"
    echo ""
    echo "📺 To view serial output:"
    echo "  screen $USB_DEVICE 115200"
    echo "  (Press Ctrl+A then K to exit)"
    echo ""
    echo "Or install espflash on host:"
    echo "  cargo install espflash"
    echo "  espflash monitor --port $USB_DEVICE"
else
    echo ""
    echo "To flash later:"
    echo "  cargo espflash flash target/xtensa-esp32s3-espidf/release/$BINARY_NAME --monitor"
fi
+20
View File
@@ -0,0 +1,20 @@
dependencies:
espressif/mdns:
component_hash: d36b265164be5139f92de993f08f5ecaa0de0c0acbf84deee1f10bb5902d04ff
dependencies:
- name: idf
require: private
version: '>=5.0'
source:
registry_url: https://components.espressif.com/
type: service
version: 1.4.3
idf:
source:
type: idf
version: 5.5.1
direct_dependencies:
- espressif/mdns
manifest_hash: e7f1c61e6135aecdb38fc4be6c0a74a0b5dab2667145d593f168f8cd7832abc8
target: esp32s3
version: 2.0.0
+8
View File
@@ -0,0 +1,8 @@
format = "EspIdf"
[idf_format_args]
partition_table = "partitions.csv"
[flash]
mode = "dio"
size = "16MB"
+2
View File
@@ -0,0 +1,2 @@
dependencies:
espressif/esp_h264: "^1.3.0"
+4
View File
@@ -0,0 +1,4 @@
# Name, Type, SubType, Offset, Size
nvs, data, nvs, 0x9000, 0x6000
phy_init, data, phy, 0xf000, 0x1000
factory, app, factory, 0x10000, 0x3F0000
1 # Name Type SubType Offset Size
2 nvs data nvs 0x9000 0x6000
3 phy_init data phy 0xf000 0x1000
4 factory app factory 0x10000 0x3F0000
+66
View File
@@ -0,0 +1,66 @@
// Android Auto wireless-setup Bluetooth protocol: the handshake in which the
// head unit hands its WiFi credentials and TCP endpoint to the phone.
// Field numbers and enum values are wire-protocol constants — do not change.
syntax = "proto2";
package androidauto.bluetooth;

// Message identifiers carried in the frame header of the Bluetooth link.
enum MessageId {
    BLUETOOTH_SOCKET_INFO_REQUEST = 1;
    BLUETOOTH_NETWORK_INFO_REQUEST = 2;
    BLUETOOTH_NETWORK_INFO_MESSAGE = 3;
    BLUETOOTH_SOCKET_INFO_RESPONSE = 7;
}

// WiFi security mode advertised to the phone in NetworkInfo.
enum SecurityMode {
    UNKNOWN_SECURITY_MODE = 0;
    OPEN = 1;
    WEP_64 = 2;
    WEP_128 = 3;
    WPA_PERSONAL = 4;
    WPA2_PERSONAL = 8;
    WPA_WPA2_PERSONAL = 12;
    WPA_ENTERPRISE = 20;
    WPA2_ENTERPRISE = 24;
    WPA_WPA2_ENTERPRISE = 28;
}

// Whether the access point address/config is fixed or dynamic.
enum AccessPointType {
    STATIC = 0;
    DYNAMIC = 1;
}

// WiFi credentials sent head unit -> phone (BLUETOOTH_NETWORK_INFO_MESSAGE).
message NetworkInfo
{
    required string ssid = 1;
    required string psk = 2;
    required string mac_addr = 3;
    required SecurityMode security_mode = 4;
    required AccessPointType ap_type = 5;
}

// TCP endpoint the phone should connect to after joining the WiFi network.
message SocketInfoRequest
{
    required string ip_address = 1;
    optional uint32 port = 2;
}

// Handshake result codes. Negative enum values are legal in proto2
// (unlike proto3, which requires the first value to be zero).
enum Status {
    STATUS_UNSOLICITED_MESSAGE = 1;
    STATUS_SUCCESS = 0;
    STATUS_NO_COMPATIBLE_VERSION = -1;
    STATUS_WIFI_INACCESSIBLE_CHANNEL = -2;
    STATUS_WIFI_INCORRECT_CREDENTIALS = -3;
    STATUS_PROJECTION_ALREADY_STARTED = -4;
    STATUS_WIFI_DISABLED = -5;
    STATUS_WIFI_NOT_YET_STARTED = -6;
    STATUS_INVALID_HOST = -7;
    STATUS_NO_SUPPORTED_WIFI_CHANNELS = -8;
    STATUS_INSTRUCT_USER_TO_CHECK_THE_PHONE = -9;
    STATUS_PHONE_WIFI_DISABLED = -10;
    STATUS_WIFI_NETWORK_UNAVAILABLE = -11;
}

// Phone -> head unit reply (BLUETOOTH_SOCKET_INFO_RESPONSE) reporting the
// outcome of the credential exchange.
message SocketInfoResponse
{
    optional string ip_address = 1;
    optional int32 port = 2;
    required Status status = 3;
}
+953
View File
@@ -0,0 +1,953 @@
// Android Auto head-unit protocol message/channel definitions.
// Field numbers and enum values are wire-protocol constants — do not change.
syntax = "proto2";
package androidauto.wifi;

// Messages valid before any version negotiation has completed.
message NonspecificMessage
{
    enum Enum
    {
        VERSION_REQUEST = 0x0001;
    }
}

// Message IDs shared by every channel.
enum CommonMessage
{
    CHANNEL_OPEN_REQUEST = 0x0007;
    CHANNEL_OPEN_RESPONSE = 0x0008;
}

// Message IDs exchanged on the control channel.
enum ControlMessage
{
    MESSAGE_NONE = 0x0000;
    VERSION_REQUEST = 0x0001;
    VERSION_RESPONSE = 0x0002;
    SSL_HANDSHAKE = 0x0003;
    AUTH_COMPLETE = 0x0004;
    SERVICE_DISCOVERY_REQUEST = 0x0005;
    SERVICE_DISCOVERY_RESPONSE = 0x0006;
    PING_REQUEST = 0x000b;
    PING_RESPONSE = 0x000c;
    NAVIGATION_FOCUS_REQUEST = 0x000d;
    NAVIGATION_FOCUS_RESPONSE = 0x000e;
    SHUTDOWN_REQUEST = 0x000f;
    SHUTDOWN_RESPONSE = 0x0010;
    VOICE_SESSION_REQUEST = 0x0011;
    AUDIO_FOCUS_REQUEST = 0x0012;
    AUDIO_FOCUS_RESPONSE = 0x0013;
}
message AVChannelMessage
{
enum Enum
{
AV_MEDIA_WITH_TIMESTAMP_INDICATION = 0x0000;
AV_MEDIA_INDICATION = 0x0001;
SETUP_REQUEST = 0x8000;
START_INDICATION = 0x8001;
STOP_INDICATION = 0x8002;
SETUP_RESPONSE = 0x8003;
AV_MEDIA_ACK_INDICATION = 0x8004;
AV_INPUT_OPEN_REQUEST = 0x8005;
AV_INPUT_OPEN_RESPONSE = 0x8006;
VIDEO_FOCUS_REQUEST = 0x8007;
VIDEO_FOCUS_INDICATION = 0x8008;
}
}
message InputChannelMessage
{
enum Enum
{
NONE = 0x0000;
INPUT_EVENT_INDICATION = 0x8001;
BINDING_REQUEST = 0x8002;
BINDING_RESPONSE = 0x8003;
}
}
enum MessageType {
SPECIFIC = 0;
CONTROL = 4;
}
enum AuthCompleteIndicationStatus
{
OK = 0;
FAIL = 1;
}
message AuthCompleteIndication
{
required AuthCompleteIndicationStatus status = 1;
}
message ServiceDiscoveryRequest
{
required string device_name = 4;
required string device_brand = 5;
}
message SensorType
{
enum Enum
{
NONE = 0;
LOCATION = 1;
COMPASS = 2;
CAR_SPEED = 3;
RPM = 4;
ODOMETER = 5;
FUEL_LEVEL = 6;
PARKING_BRAKE = 7;
GEAR = 8;
DIAGNOSTICS = 9;
NIGHT_DATA = 10;
ENVIRONMENT = 11;
HVAC = 12;
DRIVING_STATUS = 13;
DEAD_RECONING = 14;
PASSENGER = 15;
DOOR = 16;
LIGHT = 17;
TIRE = 18;
ACCEL = 19;
GYRO = 20;
GPS = 21;
}
}
message Sensor
{
required SensorType.Enum type = 1;
}
message SensorChannel
{
repeated Sensor sensors = 1;
}
message SensorChannelMessage
{
enum Enum
{
NONE = 0x0000;
SENSOR_START_REQUEST = 0x8001;
SENSOR_START_RESPONSE = 0x8002;
SENSOR_EVENT_INDICATION = 0x8003;
}
}
message SensorStartRequestMessage
{
required SensorType.Enum sensor_type = 1;
required int64 refresh_interval = 2;
}
message SensorStartResponseMessage
{
required Status.Enum status = 1;
}
message GPSLocation
{
required uint64 timestamp = 1;
required int32 latitude = 2;
required int32 longitude = 3;
required uint32 accuracy = 4;
optional int32 altitude = 5;
optional int32 speed = 6;
optional int32 bearing = 7;
}
message Compass
{
required int32 bearing =1;
required int32 pitch = 2;
required int32 roll = 3;
}
message Speed
{
required int32 speed = 1;
optional bool cruise_engaged = 2;
optional bool cruise_set_speed = 3;
}
message RPM
{
required int32 rpm = 1;
}
message Odometer
{
required int32 total_mileage = 1;
required int32 trip_mileage = 2;
}
message FuelLevel
{
required int32 fuel_level = 1;
required int32 range = 2;
required bool low_fuel = 3;
}
message ParkingBrake
{
required bool parking_brake = 1;
}
message NightMode
{
required bool is_night = 1;
}
message Environment
{
required int32 temperature =1;
required int32 pressure = 2;
required int32 rain = 3;
}
message HVAC
{
required int32 target_temperature = 1;
required int32 current_temperature = 2;
}
enum DrivingStatusEnum
{
UNRESTRICTED = 0;
NO_VIDEO = 1;
NO_KEYBOARD_INPUT = 2;
NO_VOICE_INPUT = 4;
NO_CONFIG = 8;
LIMIT_MESSAGE_LEN = 16;
FULLY_RESTRICTED = 31;
}
message DrivingStatus
{
required int32 status = 1;
}
message SteeringWheel
{
required int32 steering_angle = 1;
required int32 wheel_speed = 2;
}
message Passenger
{
required bool passenger_present = 1;
}
message Door
{
required bool hood_open = 1;
required bool boot_open = 2;
repeated bool door_open = 3;
}
message Light
{
required HeadlightStatus.Enum headlight = 1;
required IndicatorStatus.Enum indicator = 2;
required bool hazard_light_on = 3;
}
message Accel
{
required int32 acceleration_x = 1;
required int32 acceleration_y = 2;
required int32 acceleration_z = 3;
}
message Gyro
{
required int32 rotation_speed_x = 1;
required int32 rotation_speed_y = 2;
required int32 rotation_speed_z = 3;
}
message HeadlightStatus
{
enum Enum
{
STATE_0 = 0;
STATE_1 = 1;
STATE_2 = 2;
STATE_3 = 3;
}
}
message IndicatorStatus
{
enum Enum
{
STATE_0 = 0;
STATE_1 = 1;
STATE_2 = 2;
STATE_3 = 3;
}
}
message Diagnostics
{
required bytes diagnostics = 1;
}
message Gear
{
enum Enum
{
NEUTRAL = 0;
FIRST = 1;
SECOND = 2;
THIRD = 3;
FOURTH = 4;
FIFTH = 5;
SIXTH = 6;
SEVENTH = 7;
EIGHTH = 8;
NINTH = 9;
TENTH = 10;
DRIVE = 100;
PARK = 101;
REVERSE = 102;
}
required Gear.Enum gear = 1;
}
message SensorEventIndication
{
repeated GPSLocation gps_location = 1;
repeated Compass compass = 2;
repeated Speed speed = 3;
repeated RPM rpm = 4;
repeated Odometer odometer = 5;
repeated FuelLevel fuel_level = 6;
repeated ParkingBrake parking_brake = 7;
repeated Gear gear = 8;
repeated Diagnostics diagnostics = 9;
repeated NightMode night_mode = 10;
repeated Environment environment = 11;
repeated HVAC hvac = 12;
repeated DrivingStatus driving_status = 13;
repeated SteeringWheel steering_wheel = 14;
repeated Passenger passenger = 15;
repeated Door door = 16;
repeated Light light = 17;
repeated Accel accel = 19;
repeated Gyro gyro = 20;
}
message AVStreamType
{
enum Enum
{
NONE = 0;
AUDIO = 1;
VIDEO = 3;
}
}
message AudioType
{
enum Enum
{
NONE = 0;
SPEECH = 1;
SYSTEM = 2;
MEDIA = 3;
ALARM = 4;
}
}
message AudioConfig
{
required uint32 sample_rate = 1;
required uint32 bit_depth = 2;
required uint32 channel_count = 3;
}
message VideoResolution
{
enum Enum
{
NONE = 0;
_480p = 1;
_720p = 2;
_1080p = 3;
_1440p = 4;
_720p_p = 5;
_1080pp = 6;
_108s0p_p = 7;
}
}
message VideoFPS
{
enum Enum
{
NONE = 0;
_30 = 1;
_60 = 2;
}
}
message VideoConfig
{
required VideoResolution.Enum video_resolution = 1;
required VideoFPS.Enum video_fps = 2;
required uint32 margin_width = 3;
required uint32 margin_height = 4;
required uint32 dpi = 5;
optional uint32 additional_depth = 6;
}
message AVChannel
{
required AVStreamType.Enum stream_type = 1;
optional AudioType.Enum audio_type = 2;
repeated AudioConfig audio_configs = 3;
repeated VideoConfig video_configs = 4;
optional bool available_while_in_call = 5;
}
message TouchConfig
{
required uint32 width = 1;
required uint32 height = 2;
}
message InputChannel
{
repeated uint32 supported_keycodes = 1;
optional TouchConfig touch_screen_config = 2;
optional TouchConfig touch_pad_config = 3;
}
message AVInputChannel
{
required AVStreamType.Enum stream_type = 1;
required AudioConfig audio_config = 2;
optional bool available_while_in_call = 3;
}
message BluetoothPairingMethod
{
enum Enum
{
NONE = 0;
UNK_1 = 1;
A2DP = 2;
UNK_3 = 3;
HFP = 4;
}
}
message BluetoothChannel
{
required string adapter_address = 1;
repeated BluetoothPairingMethod.Enum supported_pairing_methods = 2;
}
message NavigationTurnType
{
enum Enum
{
UNKNOWN = 0;
IMAGE = 1;
ENUM = 2;
}
}
message NavigationImageOptions
{
required int32 width = 1;
required int32 height = 2;
required int32 colour_depth_bits = 3;
required int32 dunno = 4;
}
message NavigationChannel
{
required uint32 minimum_interval_ms = 1;
required NavigationTurnType.Enum type = 2;
required NavigationImageOptions image_options = 3;
}
message MediaInfoChannel
{
}
message VendorExtensionChannel
{
required string name = 1;
repeated string package_white_list = 2;
optional bytes data = 3;
}
message WifiChannel
{
required string ssid = 1;
}
message ChannelDescriptor
{
required uint32 channel_id = 1;
optional SensorChannel sensor_channel = 2;
optional AVChannel av_channel = 3;
optional InputChannel input_channel = 4;
optional AVInputChannel av_input_channel = 5;
optional BluetoothChannel bluetooth_channel = 6;
optional NavigationChannel navigation_channel = 8;
optional MediaInfoChannel media_infoChannel = 9;
optional VendorExtensionChannel vendor_extension_channel = 12;
optional WifiChannel wifi_channel=16;
}
message ServiceDiscoveryResponse
{
repeated ChannelDescriptor channels = 1;
required string head_unit_name = 2;
required string car_model = 3;
required string car_year = 4;
required string car_serial = 5;
required bool left_hand_drive_vehicle = 6;
required string headunit_manufacturer = 7;
required string headunit_model = 8;
required string sw_build = 9;
required string sw_version = 10;
required bool can_play_native_media_during_vr = 11;
optional bool hide_clock = 12;
}
message AudioFocusType
{
enum Enum
{
NONE = 0;
GAIN = 1;
GAIN_TRANSIENT = 2;
GAIN_NAVI = 3;
RELEASE = 4;
}
}
message AudioFocusRequest
{
required AudioFocusType.Enum audio_focus_type = 1;
}
message AudioFocusState
{
enum Enum
{
NONE = 0;
GAIN = 1;
GAIN_TRANSIENT = 2;
LOSS = 3;
LOSS_TRANSIENT_CAN_DUCK = 4;
LOSS_TRANSIENT = 5;
GAIN_MEDIA_ONLY = 6;
GAIN_TRANSIENT_GUIDANCE_ONLY = 7;
}
}
message AudioFocusResponse
{
required AudioFocusState.Enum audio_focus_state = 1;
}
message ChannelOpenRequest
{
required int32 priority = 1;
required int32 channel_id = 2;
}
message Status
{
enum Enum
{
OK = 0;
FAIL = 1;
}
}
message ChannelOpenResponse
{
required Status.Enum status = 1;
}
message PingRequest
{
required int64 timestamp = 1;
}
message PingResponse
{
required int64 timestamp = 1;
}
message AVChannelSetupRequest
{
required uint32 config_index = 1;
}
message AVChannelSetupStatus
{
enum Enum
{
NONE = 0;
FAIL = 1;
OK = 2;
}
}
message AVChannelSetupResponse
{
required AVChannelSetupStatus.Enum media_status = 1;
required uint32 max_unacked = 2;
repeated uint32 configs = 3;
}
message BindingRequest
{
repeated int32 scan_codes = 1;
}
message BindingResponse
{
required Status.Enum status = 1;
}
message VideoFocusMode
{
enum Enum
{
NONE = 0;
FOCUSED = 1;
UNFOCUSED = 2;
}
}
message VideoFocusReason
{
enum Enum
{
NONE = 0;
UNK_1 = 1;
UNK_2 = 2;
}
}
message VideoFocusRequest
{
optional int32 disp_index = 1;
required VideoFocusMode.Enum focus_mode = 2;
required VideoFocusReason.Enum focus_reason = 3;
}
message VideoFocusIndication
{
required VideoFocusMode.Enum focus_mode = 1;
required bool unrequested = 2;
}
message AVChannelStartIndication
{
required int32 session = 1;
required uint32 config = 2;
}
message AVChannelStopIndication
{
}
message MediaInfoChannelMessage
{
enum Enum
{
NONE = 0x0000;
PLAYBACK = 0x8001;
METADATA = 0x8003;
}
}
message MediaInfoChannelPlaybackData
{
enum PlaybackState
{
NONE = 0x0000;
TRACK_CHANGE = 1;
PLAY = 2;
PAUSE = 3;
}
required PlaybackState playback_state = 1;
required string media_source = 2;
required int32 track_progress = 3;
required int32 unknown1 = 4;
required int32 unknown2 = 5;
required int32 unknown3 = 6;
}
message MediaInfoChannelMetadataData
{
required string track_name = 1;
optional string artist_name = 2;
optional string album_name = 3;
optional bytes album_art = 4;
required int32 track_length = 6;
required int32 unknown1 = 7;
}
message BluetoothChannelMessage
{
enum Enum
{
NONE = 0x0000;
PAIRING_REQUEST = 0x8001;
PAIRING_RESPONSE = 0x8002;
AUTH_DATA = 0x8003;
}
}
message BluetoothPairingRequest
{
required string phone_address = 1;
required BluetoothPairingMethod.Enum pairing_method = 2;
}
message BluetoothPairingStatus
{
enum Enum
{
NONE = 0;
OK = 1;
FAIL = 2;
}
}
message BluetoothPairingResponse
{
required bool already_paired = 1;
required BluetoothPairingStatus.Enum status = 2;
}
message AVMediaAckIndication
{
required int32 session = 1;
required uint32 value = 2;
}
message TouchAction
{
enum Enum
{
PRESS = 0;
RELEASE = 1;
DRAG = 2;
POINTER_DOWN = 5;
POINTER_UP = 6;
}
}
message AbsoluteInputEvent
{
required uint32 scan_code = 1;
required int32 value = 2;
}
message AbsoluteInputEvents
{
repeated AbsoluteInputEvent absolute_input_events = 1;
}
message RelativeInputEvent
{
required uint32 scan_code = 1;
required int32 delta = 2;
}
message RelativeInputEvents
{
repeated RelativeInputEvent relative_input_events = 1;
}
message ButtonEvent
{
required uint32 scan_code = 1;
required bool is_pressed = 2;
optional uint32 meta = 3;
optional bool long_press = 4;
}
message ButtonEvents
{
repeated ButtonEvent button_events = 1;
}
message TouchLocation
{
required uint32 x = 1;
required uint32 y = 2;
required uint32 pointer_id = 3;
}
message TouchEvent
{
repeated TouchLocation touch_location = 1;
optional uint32 action_index = 2;
required TouchAction.Enum touch_action = 3;
}
message InputEventIndication
{
required uint64 timestamp = 1;
optional int32 disp_channel = 2;
optional TouchEvent touch_event = 3;
optional ButtonEvents button_event = 4;
optional AbsoluteInputEvents absolute_input_event = 5;
optional RelativeInputEvents relative_input_event = 6;
}
message ShutdownReason
{
enum Enum
{
NONE = 0;
QUIT = 1;
}
}
message ShutdownRequest
{
required ShutdownReason.Enum reason = 1;
}
message ShutdownResponse
{
}
message NavigationFocusRequest
{
required uint32 type = 1;
}
message NavigationFocusResponse
{
required uint32 type = 1;
}
message NavigationChannelMessage
{
enum Enum
{
NONE = 0x0000;
STATUS = 0x8003;
TURN_EVENT = 0x8004;
DISTANCE_EVENT = 0x8005;
}
}
message NavigationStatus
{
required Enum status = 1;
enum Enum
{
UNAVAILABLE = 0;
ACTIVE = 1;
INACTIVE = 2;
REROUTING = 3;
}
}
message ManeuverDirection
{
enum Enum
{
UNKNOWN = 0;
LEFT = 1;
RIGHT = 2;
UNSPECIFIED = 3;
}
}
message ManeuverType
{
enum Enum
{
UNKNOWN = 0;
DEPART = 1;
NAME_CHANGE = 2;
SLIGHT_TURN = 3;
TURN = 4;
SHARP_TURN = 5;
U_TURN = 6;
ON_RAMP = 7;
OFF_RAMP = 8;
FORK = 9;
MERGE = 10;
ROUNDABOUT_ENTER = 11;
ROUNDABOUT_EXIT = 12;
ROUNDABOUT_ENTER_AND_EXIT = 13;
STRAIGHT = 14;
FERRY_BOAT = 16;
FERRY_TRAIN = 17;
DESTINATION = 19;
}
}
message DistanceUnit
{
enum Enum
{
UNKNOWN = 0;
METERS = 1;
KILOMETERS = 2;
KILOMETERS_PARTIAL = 3;
MILES = 4;
MILES_PARTIAL = 5;
FEET = 6;
YARDS = 7;
}
}
message NavigationDistanceEvent
{
required uint32 meters = 1;
required uint32 timeToStepSeconds = 2;
required uint32 distanceToStepMillis = 3;
required DistanceUnit.Enum distanceUnit = 4;
}
message NavigationTurnEvent
{
required string street_name = 1;
required ManeuverDirection.Enum maneuverDirection = 2;
required ManeuverType.Enum maneuverType = 3;
required bytes turnImage = 4;
required uint32 roundaboutExitNumber = 5;
required uint32 roundaboutExitAngle = 6;
}
message VoiceSessionRequest
{
required uint32 type = 1; // 1 = start, 2 = stop
}
message AVInputOpenRequest
{
required bool open = 1;
optional bool anc = 2;
optional bool ec = 3;
optional int32 max_unacked = 4;
}
message AVInputOpenResponse
{
required int32 session = 1;
required uint32 value = 2;
}
+3
View File
@@ -0,0 +1,3 @@
[toolchain]
channel = "esp"
components = ["rust-src"]
+60
View File
@@ -0,0 +1,60 @@
# ESP-IDF sdkconfig defaults for Android Auto Nav Head Unit
# Target: ESP32-S3, WT32-SC01 Plus
# Main task stack — needs to be large for TLS + protobuf
CONFIG_ESP_MAIN_TASK_STACK_SIZE=32768
# PSRAM (8MB on WT32-SC01 Plus — quad SPI, NOT octal)
CONFIG_SPIRAM=y
CONFIG_SPIRAM_MODE_QUAD=y
CONFIG_SPIRAM_SPEED_80M=y
# Use MALLOC mode: regular malloc() falls back to PSRAM when internal SRAM is full.
# This is critical because Rust's Vec/String use malloc, not heap_caps_malloc.
CONFIG_SPIRAM_USE_MALLOC=y
# Allocations <= 4KB stay in fast internal SRAM; larger ones go to PSRAM automatically
CONFIG_SPIRAM_MALLOC_ALWAYSINTERNAL=4096
# Reserve some internal memory for DMA/stack (default is fine but be explicit)
CONFIG_SPIRAM_MALLOC_RESERVE_INTERNAL=32768
# Flash (16MB on WT32-SC01 Plus)
CONFIG_ESPTOOLPY_FLASHSIZE_16MB=y
CONFIG_ESPTOOLPY_FLASHMODE_QIO=y
# Bluetooth — BLE only (ESP32-S3 does NOT support Bluetooth Classic)
# Android Auto wireless pairing via BT SPP is not available on this SoC.
CONFIG_BT_ENABLED=y
CONFIG_BT_BLE_ENABLED=y
CONFIG_BT_NIMBLE_ENABLED=y
# WiFi — required for Android Auto data transport
CONFIG_ESP_WIFI_ENABLED=y
CONFIG_ESP_WIFI_STATIC_RX_BUFFER_NUM=10
CONFIG_ESP_WIFI_DYNAMIC_RX_BUFFER_NUM=32
CONFIG_ESP_WIFI_DYNAMIC_TX_BUFFER_NUM=32
# H.264 software decoder (esp_h264 component)
# Dual-task decoder for better FPS on ESP32-S3
CONFIG_ESP_H264_DECODER_IRAM=1
CONFIG_ESP_H264_DUAL_TASK=1
# TLS — mbedtls for Android Auto TLS handshake
CONFIG_MBEDTLS_TLS_CLIENT=y
CONFIG_MBEDTLS_TLS_SERVER=y
CONFIG_MBEDTLS_SSL_ALPN=y
CONFIG_MBEDTLS_CERTIFICATE_BUNDLE=y
CONFIG_MBEDTLS_CERTIFICATE_BUNDLE_DEFAULT_FULL=y
CONFIG_MBEDTLS_HARDWARE_AES=y
CONFIG_MBEDTLS_HARDWARE_SHA=y
CONFIG_MBEDTLS_KEY_EXCHANGE_RSA=y
# TCP/IP
CONFIG_LWIP_MAX_SOCKETS=10
CONFIG_LWIP_TCP_SND_BUF_DEFAULT=8192
CONFIG_LWIP_TCP_WND_DEFAULT=8192
CONFIG_LWIP_TCP_RECVMBOX_SIZE=32
# Logging
CONFIG_LOG_DEFAULT_LEVEL_INFO=y
# mDNS — for Android Auto service discovery (_androidauto._tcp)
CONFIG_MDNS_MAX_SERVICES=4
+167
View File
@@ -0,0 +1,167 @@
//! Bluetooth RFCOMM service for Android Auto wireless setup.
//!
//! **ESP32-S3 Limitation**: The ESP32-S3 SoC only supports Bluetooth Low
//! Energy (BLE), NOT Bluetooth Classic. Android Auto wireless pairing
//! requires Bluetooth Classic SPP (Serial Port Profile) for the initial
//! handshake where WiFi credentials are exchanged.
//!
//! Since BT Classic is unavailable on ESP32-S3, this module provides:
//! - The wire protocol implementation (generic over Read+Write) for
//! future use with an external BT Classic module or different SoC.
//! - A `start_bluetooth_service()` that cleanly skips BT pairing.
//!
//! Current workaround: The phone must manually join the ESP32's WiFi AP
//! and connect to the advertised TCP port directly.
//!
//! Future options:
//! - BLE advertising of WiFi AP credentials for companion app
//! - mDNS/DNS-SD service discovery (`_androidauto._tcp`)
//! - External BT Classic module via UART (e.g., RN42, HC-05)
//! - Use original ESP32 (which has BT Classic) instead of ESP32-S3
use anyhow::{Context, Result, bail};
use protobuf::Message;
/// The Android Auto Bluetooth UUID (SPP profile).
pub const ANDROID_AUTO_UUID: &str = "4de17a00-52cb-11e6-bdf4-0800200c9a66";

/// RFCOMM channel for Android Auto.
pub const RFCOMM_CHANNEL: u8 = 22;

/// WiFi network information to relay to the phone.
///
/// Filled in by the WiFi AP setup code and handed to
/// `handle_bluetooth_client` / `start_bluetooth_service`.
#[derive(Debug, Clone)]
pub struct WifiCredentials {
    // SSID of the head unit's access point.
    pub ssid: String,
    // WPA2 pre-shared key (sent to the phone as `psk`).
    pub password: String,
    // AP MAC address, relayed in NetworkInfo.
    pub mac_addr: String,
    // IP address the TCP listener is bound to.
    pub ip_addr: String,
    // TCP port the phone should connect to after joining the AP.
    pub port: u16,
}
/// Handle a Bluetooth client connection.
///
/// Protocol (from upstream analysis):
/// 1. We send SocketInfoRequest (our IP + port)
/// 2. Phone sends NetworkInfoRequest
/// 3. We reply with NetworkInfoMessage (WiFi SSID + password)
/// 4. Phone sends SocketInfoResponse (status)
/// 5. Connection ends, phone connects via WiFi
///
/// Generic over `Read + Write` so it can drive any byte transport (external
/// BT module over UART, a test buffer, a future SoC with BT Classic).
///
/// # Errors
/// Returns an error on I/O failure or when a protobuf message fails to
/// (de)serialize; a success status from the phone returns `Ok(())`.
pub fn handle_bluetooth_client<S: std::io::Read + std::io::Write>(
    stream: &mut S,
    wifi_creds: &WifiCredentials,
) -> Result<()> {
    use crate::proto::Bluetooth;
    use protobuf::Enum;
    log::info!("Android Auto: Bluetooth client connected");
    // Step 1: Send SocketInfoRequest with our TCP listen address
    let mut socket_req = Bluetooth::SocketInfoRequest::new();
    socket_req.set_ip_address(wifi_creds.ip_addr.clone());
    socket_req.set_port(wifi_creds.port as u32);
    let msg_bytes = socket_req.write_to_bytes().context("serializing SocketInfoRequest")?;
    let msg_type = Bluetooth::MessageId::BLUETOOTH_SOCKET_INFO_REQUEST as u16;
    send_bt_message(stream, msg_type, &msg_bytes)?;
    // Step 2-4: Message loop
    loop {
        let (msg_type, msg_data) = recv_bt_message(stream)?;
        // Unrecognized IDs are skipped, not fatal — the phone may send
        // message types this implementation does not model.
        let Some(msg_id) = Bluetooth::MessageId::from_i32(msg_type as i32) else {
            log::warn!("Unknown Bluetooth message type: 0x{:x}", msg_type);
            continue;
        };
        match msg_id {
            Bluetooth::MessageId::BLUETOOTH_NETWORK_INFO_REQUEST => {
                log::info!("Phone requests network info, sending WiFi credentials");
                let mut net_info = Bluetooth::NetworkInfo::new();
                net_info.set_ssid(wifi_creds.ssid.clone());
                net_info.set_psk(wifi_creds.password.clone());
                net_info.set_mac_addr(wifi_creds.mac_addr.clone());
                net_info.set_security_mode(Bluetooth::SecurityMode::WPA2_PERSONAL);
                net_info.set_ap_type(Bluetooth::AccessPointType::STATIC);
                let msg_bytes = net_info.write_to_bytes().context("serializing NetworkInfo")?;
                let msg_type = Bluetooth::MessageId::BLUETOOTH_NETWORK_INFO_MESSAGE as u16;
                send_bt_message(stream, msg_type, &msg_bytes)?;
            }
            Bluetooth::MessageId::BLUETOOTH_SOCKET_INFO_RESPONSE => {
                let resp = Bluetooth::SocketInfoResponse::parse_from_bytes(&msg_data)
                    .context("parsing SocketInfoResponse")?;
                log::info!("Phone responded with status: {:?}", resp.status());
                if resp.status() == Bluetooth::Status::STATUS_SUCCESS {
                    log::info!("Bluetooth handshake complete, phone will connect via WiFi");
                    return Ok(());
                }
                // Phone reported an error — keep waiting for retry
                log::warn!("Phone reported non-success status, waiting...");
            }
            Bluetooth::MessageId::BLUETOOTH_SOCKET_INFO_REQUEST => {
                // NOTE(review): treated as an implicit handshake completion;
                // confirm against phone behavior whether a reply is expected.
                log::info!("Got socket info request from phone, handshake done");
                return Ok(());
            }
            _ => {
                log::warn!("Unhandled Bluetooth message: {:?}", msg_id);
            }
        }
        // Small pause between protocol rounds to avoid busy-spinning the link.
        std::thread::sleep(std::time::Duration::from_millis(100));
    }
}
/// Send a Bluetooth protocol message: [len: u16] [type: u16] [payload...]
fn send_bt_message<W: std::io::Write>(
stream: &mut W,
msg_type: u16,
payload: &[u8],
) -> Result<()> {
let len = payload.len() as u16;
stream.write_all(&len.to_be_bytes())?;
stream.write_all(&msg_type.to_be_bytes())?;
stream.write_all(payload)?;
stream.flush()?;
Ok(())
}
/// Read one framed Bluetooth protocol message from `stream`.
///
/// Wire format: `[payload len: u16 BE] [message type: u16 BE] [payload...]`.
/// Returns the `(msg_type, payload)` pair.
fn recv_bt_message<R: std::io::Read>(stream: &mut R) -> Result<(u16, Vec<u8>)> {
    // Pull a single big-endian u16 off the wire.
    fn read_u16<R: std::io::Read>(src: &mut R) -> std::io::Result<u16> {
        let mut word = [0u8; 2];
        src.read_exact(&mut word)?;
        Ok(u16::from_be_bytes(word))
    }
    let len = read_u16(stream)?;
    let msg_type = read_u16(stream)?;
    let mut payload = vec![0u8; usize::from(len)];
    stream.read_exact(&mut payload)?;
    Ok((msg_type, payload))
}
// ---------------------------------------------------------------------------
// ESP-IDF Bluetooth — ESP32-S3 does NOT support BT Classic
// ---------------------------------------------------------------------------
/// Initialize Bluetooth service for Android Auto pairing.
///
/// **ESP32-S3**: Bluetooth Classic SPP is not supported by the hardware.
/// This function logs the limitation and returns Ok immediately.
/// The connection cycle proceeds directly to TCP listening.
///
/// On original ESP32 (which supports BT Classic), this would:
/// 1. Initialize Bluedroid in Classic mode
/// 2. Register SPP service with `ANDROID_AUTO_UUID`
/// 3. Accept incoming RFCOMM connection
/// 4. Call `handle_bluetooth_client()` to exchange WiFi credentials
///
/// # Errors
/// Currently infallible; the `Result` return is kept so a real BT Classic
/// implementation can slot in without changing callers.
pub fn start_bluetooth_service(wifi_creds: &WifiCredentials) -> Result<()> {
    // Deliberately a no-op beyond logging: see module docs for the
    // ESP32-S3 BT Classic limitation and the manual-WiFi-join workaround.
    log::warn!("ESP32-S3 does not support Bluetooth Classic (SPP)");
    log::warn!("Android Auto wireless BT pairing is not available");
    log::info!(
        "Phone must join WiFi AP '{}' and connect to {}:{} directly",
        wifi_creds.ssid, wifi_creds.ip_addr, wifi_creds.port
    );
    Ok(())
}
+83
View File
@@ -0,0 +1,83 @@
//! Hard-coded default certificates for the Android Auto protocol.
//!
//! These are the same certificates used by the upstream `android-auto` crate.
//! The client certificate identifies this head unit to the phone during TLS.
/// The server cert root certificate for the Android Auto server (the phone).
/// Issued by "Google Automotive Link" CA.
pub const AAUTO_CERT: &[u8] = b"-----BEGIN CERTIFICATE-----\n\
MIIDiTCCAnGgAwIBAgIJAMFO56WkVE1CMA0GCSqGSIb3DQEBBQUAMFsxCzAJBgNV\n\
BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1Nb3VudGFpbiBW\n\
aWV3MR8wHQYDVQQKDBZHb29nbGUgQXV0b21vdGl2ZSBMaW5rMB4XDTE0MDYwNjE4\n\
MjgxOVoXDTQ0MDYwNTE4MjgxOVowWzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNh\n\
bGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxHzAdBgNVBAoMFkdvb2ds\n\
ZSBBdXRvbW90aXZlIExpbmswggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB\n\
AQDUH+iIbwwVb74NdI5eBv/ACFmh4ml/NOW7gUVWdYX50n8uQQsHHLCNIhk5VV2H\n\
hanvAZ/XXHPuVAPadE2HpnNqePKF/RDo4eJo/+rOief8gBYq/Z+OQTZeLdNm+GoI\n\
HBrEjU4Ms8IdLuFW0jF8LlIRgekjLHpc7duUl3QpwBlmAWQK40T/SZjprlmhyqfJ\n\
g1rxFdnGbrSibmCsTmb3m6WZyZUyrcwmd7t6q3pHbMABO+o02asPG/YPj/SJo4+i\n\
fb5/Nk56f3hH9pBiPKQXJnVUdVLKMXSRgydDBsGSBol4C0JL77MNDrMR5jdafJ4j\n\
mWmsa2+mnzoAv9AxEL9T0LiNAgMBAAGjUDBOMB0GA1UdDgQWBBS5dqvv8DPQiwrM\n\
fgn8xKR91k7wgjAfBgNVHSMEGDAWgBS5dqvv8DPQiwrMfgn8xKR91k7wgjAMBgNV\n\
HRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQDKcnBsrbB0Jbz2VGJKP2lwYB6P\n\
dCTCCpQu7dVp61UQOX+zWfd2hnNMnLs/r1xPO+eyN0vmw7sD05phaIhbXVauKWZi\n\
9WqWHTaR+9s6CTyBOc1Mye0DMj+4vHt+WLmf0lYjkYUVYvR1EImX8ktXzkVmOqn+\n\
e30siqlZ8pQpsOgegIKfJ+pNQM8c3eXVv3KFMUgjZW33SziZL8IMsLvSO+1LtH37\n\
KqbTEMP6XUwVuZopgGvaHU74eT/WSRGlL7vX4OL5/UXXP4qsGH2Zp7uQlErv4H9j\n\
kMs37UL1vGb4M8RM7Eyu9/RulepSmqZUF+3i+3eby8iGq/3OWk9wgJf7AXnx\n\
-----END CERTIFICATE-----\n";
/// The head unit client certificate (PEM).
/// Google-Android-Reference cert, signed by Google Automotive Link CA.
/// From: https://github.com/borconi/headunit/blob/master/jni/hu_ssl.h
/// X.509 v1 with proper UTC dates (mbedtls-compatible).
///
/// Presented to the phone during the TLS handshake to identify this head
/// unit. Must be used together with [`PRIVATE_KEY`] below, which holds the
/// matching key pair.
pub const CERTIFICATE: &[u8] = b"-----BEGIN CERTIFICATE-----\n\
MIIDJTCCAg0CAnZTMA0GCSqGSIb3DQEBCwUAMFsxCzAJBgNVBAYTAlVTMRMwEQYD\n\
VQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1Nb3VudGFpbiBWaWV3MR8wHQYDVQQK\n\
DBZHb29nbGUgQXV0b21vdGl2ZSBMaW5rMB4XDTE0MDcwODIyNDkxOFoXDTQ0MDcw\n\
NzIyNDkxOFowVTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1N\n\
b3VudGFpbiBWaWV3MSEwHwYDVQQKDBhHb29nbGUtQW5kcm9pZC1SZWZlcmVuY2Uw\n\
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCpqQmvoDW/XsREoj20dRcM\n\
qJGWh8RlUoHB8CpBpsoqV4nAuvNngkyrdpCf1yg0fVAp2Ugj5eOtzbiN6BxoNHpP\n\
giZ64pc+JRlwjmyHpssDaHzP+zHZM7acwMcroNVyynSzpiydEDyx/KPtEz5AsKi7\n\
c7AYYEtnCmAnK/waN1RT5KdZ9f97D9NeF7Ljdk+IKFROJh7Nv/YGiv9GdPZh/ezS\n\
m2qhD3gzdh9PYs2cu0u+N17PYpSYB7vXPcYa/gmIVipIJ5RuMQVBWrCgtfzwKPqb\n\
nJQVykm8LnysK+8RCgmPLN3uhsZx6Whax2TVXb1q68DoiaFPhvMfPr2i/9IKaC69\n\
AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAIpfjQriEtbpUyWLoOOfJsjFN04+ajq9\n\
1XALCPd+2ixWHZIBJiucrrf0H7OgY7eFnNbU0cRqiDZHI8BtvzFxNi/JgXqCmSHR\n\
rlaoIsITfqo8KHwcAMs4qWTeLQmkTXBZYz0M3HwC7N1vOGjAJJN5qENIm1Jq+/3c\n\
fxVg2zhHPKY8qtdgl73YIXb9Xx3WmPCBeRBCKJncj0Rq14uaOjWXRyBgbmdzMXJz\n\
FGPHx3wN04JqGyfPFlDazXExFQwuAryjoYBRdxPxGufeQCp3am4xxI2oxNIzR+4L\n\
nOcDhgU1B7sbkVzbKj5gjdOQAmxnKCfBtUNB63a7yzGPYGPIwlBsm54=\n\
-----END CERTIFICATE-----\n";
/// The head unit client private key (PKCS#8 PEM).
/// Matches the Google-Android-Reference certificate above.
///
/// SECURITY NOTE(review): this is a hard-coded private key. It is the
/// publicly-known reference key published alongside [`CERTIFICATE`] (see the
/// headunit repo URL above), so there is no expectation of secrecy — but it
/// must never be replaced with a real, device-unique key committed to source
/// control. Confirm whether the Android Auto handshake accepts this shared
/// reference identity on current phone firmware.
pub const PRIVATE_KEY: &[u8] = b"-----BEGIN PRIVATE KEY-----\n\
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCpqQmvoDW/XsRE\n\
oj20dRcMqJGWh8RlUoHB8CpBpsoqV4nAuvNngkyrdpCf1yg0fVAp2Ugj5eOtzbiN\n\
6BxoNHpPgiZ64pc+JRlwjmyHpssDaHzP+zHZM7acwMcroNVyynSzpiydEDyx/KPt\n\
Ez5AsKi7c7AYYEtnCmAnK/waN1RT5KdZ9f97D9NeF7Ljdk+IKFROJh7Nv/YGiv9G\n\
dPZh/ezSm2qhD3gzdh9PYs2cu0u+N17PYpSYB7vXPcYa/gmIVipIJ5RuMQVBWrCg\n\
tfzwKPqbnJQVykm8LnysK+8RCgmPLN3uhsZx6Whax2TVXb1q68DoiaFPhvMfPr2i\n\
/9IKaC69AgMBAAECggEAbBoW3963IG6jpA+0PW11+EzYJw/u5ZiCsS3z3s0Fd6E7\n\
VqBIQyXU8FOlpxMSvQ8zqtaVjroGLlIsS88feo4leM+28Qm70I8W/I7jPDPcmxlS\n\
nbqycnDu5EY5IeVi27eAUI+LUbBs3APb900Rl2p4uKfoBkAlC0yjI5J1GcczZhf7\n\
RDh1wGgFWZI+ljiSrfpdiA4XmcZ9c7FlO5+NTotZzYeNx1iZprajV1/dlDy8UWEk\n\
woWtppeGzUf3HHgl8yay62ub2vo5I1Z7Z98Roq8KC1o7k2IXOrHztCl3X03gMwlI\n\
F4WQ6Fx5LZDU9dfaPhzkutekVgbtO9SzHgb3NXCZwQKBgQDcSS/OLll18ssjBwc7\n\
PsdaIFIPlF428Tk8qezEnDmHS6xeztkGnpOlilk9jYSsVUbQmq8MwBSjfMVH95B0\n\
w0yyfOYqjgTocg4lRCoPuBdnuBY/lU1Lws4FoGsGMNFkHWjHzl622mavkJiDzWA+\n\
CORPUllS/DnPKJnZk2n0zZRKaQKBgQDFKqvePMx/a/ayQ09UZYxov0vwRyNkHevm\n\
wEGQjOiHKozWvLqWhCvFtwo+VqHqmCw95cYUpg1GvppB6Lnw2uHgWAWxr3ugDjaR\n\
YSqG/L7FG6FDF+1sPvBuxNpBmto59TI1fBFmU9VBGLDnr1M27qH3KTWlA3lCsovV\n\
6Dbk7D+vNQKBgE6GgFYdS6KyFBu+a6OA84t7LgWDvDoVr3Oil1ZW4mMKZL2/OroT\n\
WUqPkNRSWFMeawn9uhzvc+v7lE/dPk+BNxwBTgMpcTJzRfue2ueTljRQ+Q1daZpy\n\
LQLwdnZUfLAVk752IGlKXYSEJPoHAiHbBZgJIPJmGy1vqbhXxlOP3SbRAoGBAJoA\n\
Q2/5gy0/sdf5FRxxmOM0D+dkWTNY36pDnrJ+LR1uUcVkckUghWQQHRMl7aBkLaJH\n\
N5lnPdV1CN3UHnAPNwBZIFFyJJiWoW6aO3JmNceVVjcmmE7FNlz+qw81GaDNcOMv\n\
vhN0BYyr8Xl1iwTMDXwVFw6FkRBUjz6L+1yBXxjFAoGAJZcU+tEM1+gHPCqHK2bP\n\
kfYOCyEAro4zY/VWXZKHgCoPau8Uc9+vFu2QVMb5kVyLTdyRLQKpooR6f8En6utS\n\
/G15YuqRYqzSTrMBzpRrqIwbgKI9RHNPAvhtVAmXnwsYDPIQ1rrELK6WzTjUySRd\n\
7gyCoq+DlY7ZKDa7FUz05Ek=\n\
-----END PRIVATE KEY-----\n";
+304
View File
@@ -0,0 +1,304 @@
//! Stub channel descriptors for channels required by Android Auto
//! but not used in navigation-only mode.
//!
//! The phone expects to see video, audio, sensor, and input channels
//! during service discovery. We advertise them but handle their data
//! minimally (ack and discard).
use protobuf::{Enum, Message};
use crate::frame::{ChannelId, Frame};
use crate::proto::Wifi;
// ---------------------------------------------------------------------------
// Video channel stub
// ---------------------------------------------------------------------------
/// Build a video channel descriptor (480p, 30fps).
///
/// Advertised during service discovery so the phone knows it can open a
/// video stream on `channel_id`.
pub fn build_video_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // Single supported video mode: 480p @ 30fps, 160 dpi, no margins.
    let mut video_cfg = Wifi::VideoConfig::new();
    video_cfg.set_video_resolution(Wifi::video_resolution::Enum::_480p);
    video_cfg.set_video_fps(Wifi::video_fps::Enum::_30);
    video_cfg.set_dpi(160);
    video_cfg.set_margin_height(0);
    video_cfg.set_margin_width(0);

    let mut av = Wifi::AVChannel::new();
    av.set_stream_type(Wifi::avstream_type::Enum::VIDEO);
    av.set_available_while_in_call(true);
    av.set_audio_type(Wifi::audio_type::Enum::SYSTEM);
    av.video_configs.push(video_cfg);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.av_channel = ::protobuf::MessageField::some(av);
    descriptor
}
/// Build a video setup response frame.
///
/// Accepts the phone's setup request (status OK, config index 0, up to two
/// unacked media packets).
pub fn video_setup_response_frame(channel_id: ChannelId) -> Frame {
    let mut response = Wifi::AVChannelSetupResponse::new();
    response.set_media_status(Wifi::avchannel_setup_status::Enum::OK);
    response.set_max_unacked(2);
    response.configs.push(0);

    // Payload = 2-byte big-endian message type followed by the protobuf body.
    let mut payload = (Wifi::avchannel_message::Enum::SETUP_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_encrypted(channel_id, payload)
}
/// Build a video focus indication frame.
///
/// Tells the phone whether the head unit display currently shows the
/// projected video (`focused`) or not.
pub fn video_focus_frame(channel_id: ChannelId, focused: bool) -> Frame {
    let mode = if focused {
        Wifi::video_focus_mode::Enum::FOCUSED
    } else {
        Wifi::video_focus_mode::Enum::UNFOCUSED
    };
    let mut indication = Wifi::VideoFocusIndication::new();
    indication.set_focus_mode(mode);
    indication.set_unrequested(false);

    let mut payload = (Wifi::avchannel_message::Enum::VIDEO_FOCUS_INDICATION as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&indication.write_to_bytes().unwrap());
    Frame::new_encrypted(channel_id, payload)
}
/// Build an audio setup response frame.
///
/// Same shape as the video setup response: status OK, config index 0,
/// at most two unacked packets.
pub fn audio_setup_response_frame(channel_id: ChannelId) -> Frame {
    let mut response = Wifi::AVChannelSetupResponse::new();
    response.set_media_status(Wifi::avchannel_setup_status::Enum::OK);
    response.set_max_unacked(2);
    response.configs.push(0);

    let mut payload = (Wifi::avchannel_message::Enum::SETUP_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_encrypted(channel_id, payload)
}
/// Build a media ack frame (acknowledges received video/audio packets).
///
/// `session` and `value` are echoed from the corresponding media indication.
pub fn media_ack_frame(channel_id: ChannelId, session: i32, value: u32) -> Frame {
    let mut ack = Wifi::AVMediaAckIndication::new();
    ack.set_session(session);
    ack.set_value(value);

    let mut payload = (Wifi::avchannel_message::Enum::AV_MEDIA_ACK_INDICATION as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&ack.write_to_bytes().unwrap());
    Frame::new_encrypted(channel_id, payload)
}
// ---------------------------------------------------------------------------
// Audio channel stubs (media, system, speech)
// ---------------------------------------------------------------------------
/// Build the media audio channel descriptor (stereo, 48kHz).
pub fn build_media_audio_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // 48kHz / 16-bit / stereo — the standard media playback format.
    let mut audio_cfg = Wifi::AudioConfig::new();
    audio_cfg.set_sample_rate(48000);
    audio_cfg.set_bit_depth(16);
    audio_cfg.set_channel_count(2);

    let mut av = Wifi::AVChannel::new();
    av.set_audio_type(Wifi::audio_type::Enum::MEDIA);
    av.set_available_while_in_call(true);
    av.set_stream_type(Wifi::avstream_type::Enum::AUDIO);
    av.audio_configs.push(audio_cfg);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.av_channel = ::protobuf::MessageField::some(av);
    descriptor
}
/// Build the speech audio channel descriptor (mono, 16kHz).
pub fn build_speech_audio_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // 16kHz / 16-bit / mono — voice-prompt quality.
    let mut audio_cfg = Wifi::AudioConfig::new();
    audio_cfg.set_sample_rate(16000);
    audio_cfg.set_bit_depth(16);
    audio_cfg.set_channel_count(1);

    let mut av = Wifi::AVChannel::new();
    av.set_audio_type(Wifi::audio_type::Enum::SPEECH);
    av.set_available_while_in_call(true);
    av.set_stream_type(Wifi::avstream_type::Enum::AUDIO);
    av.audio_configs.push(audio_cfg);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.av_channel = ::protobuf::MessageField::some(av);
    descriptor
}
/// Build the system audio channel descriptor (mono, 16kHz).
pub fn build_system_audio_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // 16kHz / 16-bit / mono — notification/system-sound quality.
    let mut audio_cfg = Wifi::AudioConfig::new();
    audio_cfg.set_sample_rate(16000);
    audio_cfg.set_bit_depth(16);
    audio_cfg.set_channel_count(1);

    let mut av = Wifi::AVChannel::new();
    av.set_audio_type(Wifi::audio_type::Enum::SYSTEM);
    av.set_available_while_in_call(true);
    av.set_stream_type(Wifi::avstream_type::Enum::AUDIO);
    av.audio_configs.push(audio_cfg);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.av_channel = ::protobuf::MessageField::some(av);
    descriptor
}
/// Build the AV input (microphone) channel descriptor.
///
/// Advertised so the phone can request voice input; mono 16kHz capture.
pub fn build_avinput_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    let mut audio_cfg = Wifi::AudioConfig::new();
    audio_cfg.set_sample_rate(16000);
    audio_cfg.set_bit_depth(16);
    audio_cfg.set_channel_count(1);

    let mut input = Wifi::AVInputChannel::new();
    input.set_stream_type(Wifi::avstream_type::Enum::AUDIO);
    input.audio_config = ::protobuf::MessageField::some(audio_cfg);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.av_input_channel = ::protobuf::MessageField::some(input);
    descriptor
}
/// Build the media info/status channel descriptor.
///
/// An empty `MediaInfoChannel` — we advertise it but ignore its data.
pub fn build_media_info_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.media_infoChannel =
        ::protobuf::MessageField::some(Wifi::MediaInfoChannel::new());
    descriptor
}
// ---------------------------------------------------------------------------
// Sensor channel stub
// ---------------------------------------------------------------------------
/// Build a sensor channel descriptor (night mode only — minimal).
pub fn build_sensor_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // NIGHT_DATA is the minimum sensor the phone expects to be advertised.
    let mut night_sensor = Wifi::Sensor::new();
    night_sensor.set_type(Wifi::sensor_type::Enum::NIGHT_DATA);

    let mut sensor_chan = Wifi::SensorChannel::new();
    sensor_chan.sensors.push(night_sensor);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.sensor_channel = ::protobuf::MessageField::some(sensor_chan);
    descriptor
}
// ---------------------------------------------------------------------------
// Input channel stub
// ---------------------------------------------------------------------------
/// Build an input channel descriptor (touch screen).
///
/// Advertises a 480×320 touch surface matching the WT32-SC01 Plus panel.
pub fn build_input_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    let mut touch = Wifi::TouchConfig::new();
    touch.set_width(480);
    touch.set_height(320);

    let mut input = Wifi::InputChannel::new();
    input.touch_screen_config = ::protobuf::MessageField::some(touch);

    let mut descriptor = Wifi::ChannelDescriptor::new();
    descriptor.set_channel_id(channel_id as u32);
    descriptor.input_channel = ::protobuf::MessageField::some(input);
    descriptor
}
// ---------------------------------------------------------------------------
// AV message parsing helpers (for acking)
// ---------------------------------------------------------------------------
/// The type of AV channel message received.
///
/// Produced by [`parse_av_frame`]; each variant mirrors one
/// `Wifi::avchannel_message` wire type.
#[derive(Debug)]
pub enum AvMessage {
    /// SETUP_REQUEST — phone asks to use config `config_index`.
    SetupRequest { config_index: u32 },
    /// START_INDICATION — phone starts streaming on `session` with `config`.
    StartIndication { session: i32, config: u32 },
    /// STOP_INDICATION — phone stops streaming.
    StopIndication,
    /// Media payload; `timestamp` present only for the WITH_TIMESTAMP variant.
    MediaData { timestamp: Option<u64>, data: Vec<u8> },
    /// VIDEO_FOCUS_REQUEST — phone asks for/relinquishes video focus.
    VideoFocusRequest { focused: bool },
    /// AV_MEDIA_ACK_INDICATION — phone acknowledges our media packets.
    AckIndication { session: i32, value: u32 },
    /// AV_INPUT_OPEN_REQUEST — phone wants to open the microphone channel.
    InputOpenRequest,
}
/// Try to parse an AV channel frame.
///
/// Returns an error — never panics — on truncated frames, unknown message
/// types, or malformed protobuf payloads.
pub fn parse_av_frame(frame: &Frame) -> Result<AvMessage, String> {
    // Every AV message starts with a 2-byte big-endian type prefix. The
    // previous code sliced `frame.data[0..2]` unconditionally, which panics
    // on a short/empty frame from the phone.
    let Some(ty_bytes) = frame.data.get(0..2) else {
        return Err(format!("AV frame too short: {} bytes", frame.data.len()));
    };
    let ty = u16::from_be_bytes([ty_bytes[0], ty_bytes[1]]);
    let Some(msg_type) = Wifi::avchannel_message::Enum::from_i32(ty as i32) else {
        return Err(format!("Unknown AV message 0x{:x}", ty));
    };
    match msg_type {
        Wifi::avchannel_message::Enum::SETUP_REQUEST => {
            let m = Wifi::AVChannelSetupRequest::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid AVChannelSetupRequest: {}", e))?;
            Ok(AvMessage::SetupRequest {
                config_index: m.config_index(),
            })
        }
        Wifi::avchannel_message::Enum::START_INDICATION => {
            let m = Wifi::AVChannelStartIndication::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid AVChannelStartIndication: {}", e))?;
            Ok(AvMessage::StartIndication {
                session: m.session(),
                config: m.config(),
            })
        }
        Wifi::avchannel_message::Enum::STOP_INDICATION => Ok(AvMessage::StopIndication),
        Wifi::avchannel_message::Enum::AV_MEDIA_WITH_TIMESTAMP_INDICATION => {
            // An 8-byte big-endian timestamp follows the type prefix; use a
            // checked slice so a short frame yields an error instead of a
            // panic (the old `frame.data[2..10]` panicked before `try_into`
            // could report the error).
            let ts_bytes: [u8; 8] = frame
                .data
                .get(2..10)
                .ok_or_else(|| "Short timestamp".to_string())?
                .try_into()
                .expect("get(2..10) yields exactly 8 bytes");
            Ok(AvMessage::MediaData {
                timestamp: Some(u64::from_be_bytes(ts_bytes)),
                data: frame.data[10..].to_vec(),
            })
        }
        Wifi::avchannel_message::Enum::AV_MEDIA_INDICATION => Ok(AvMessage::MediaData {
            timestamp: None,
            data: frame.data[2..].to_vec(),
        }),
        Wifi::avchannel_message::Enum::VIDEO_FOCUS_REQUEST => {
            let m = Wifi::VideoFocusRequest::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid VideoFocusRequest: {}", e))?;
            let focused =
                m.focus_mode() == Wifi::video_focus_mode::Enum::FOCUSED;
            Ok(AvMessage::VideoFocusRequest { focused })
        }
        Wifi::avchannel_message::Enum::AV_MEDIA_ACK_INDICATION => {
            let m = Wifi::AVMediaAckIndication::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid AVMediaAckIndication: {}", e))?;
            Ok(AvMessage::AckIndication {
                session: m.session(),
                value: m.value(),
            })
        }
        Wifi::avchannel_message::Enum::AV_INPUT_OPEN_REQUEST => {
            Ok(AvMessage::InputOpenRequest)
        }
        _ => Err(format!("Unhandled AV message 0x{:x}", ty as u32)),
    }
}
+54
View File
@@ -0,0 +1,54 @@
//! Messages common to all Android Auto channels.
//!
//! Ported from the upstream `android-auto` crate — same wire format,
//! but using our synchronous Frame type instead of tokio-based async.
use protobuf::{Enum, Message};
use crate::frame::{ChannelId, Frame};
use crate::proto::Wifi;
/// Messages that can appear on any channel (channel-open handshake).
#[derive(Debug)]
pub enum CommonMessage {
    /// Phone asks us to open a channel.
    ChannelOpenRequest(Wifi::ChannelOpenRequest),
    /// Our answer to a channel-open request, addressed to a channel.
    /// NOTE(review): parsing rejects this direction (phone→head unit);
    /// the variant exists for the outgoing path.
    ChannelOpenResponse(ChannelId, Wifi::ChannelOpenResponse),
}
impl CommonMessage {
    /// Try to parse a common (channel-open handshake) message from a frame.
    ///
    /// Returns an error — never panics — if the frame is not a control/common
    /// message, is too short to carry the 2-byte type prefix, or carries an
    /// unknown/unexpected message type.
    pub fn try_from_frame(frame: &Frame) -> Result<Self, String> {
        if !frame.header.frame.get_control() {
            return Err("Not a control/common message".into());
        }
        // The payload starts with a 2-byte big-endian message-type prefix.
        // The previous code sliced `frame.data[0..2]` unconditionally and
        // panicked on a truncated frame.
        let Some(ty_bytes) = frame.data.get(0..2) else {
            return Err(format!("Common message too short: {} bytes", frame.data.len()));
        };
        let ty = u16::from_be_bytes([ty_bytes[0], ty_bytes[1]]);
        let Some(msg_type) = Wifi::CommonMessage::from_i32(ty as i32) else {
            return Err(format!("Unknown common message type 0x{:x}", ty));
        };
        match msg_type {
            Wifi::CommonMessage::CHANNEL_OPEN_REQUEST => {
                let m = Wifi::ChannelOpenRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid ChannelOpenRequest: {}", e))?;
                Ok(Self::ChannelOpenRequest(m))
            }
            // Only the head unit sends open responses; receiving one is a
            // protocol violation.
            Wifi::CommonMessage::CHANNEL_OPEN_RESPONSE => {
                Err("Unexpected ChannelOpenResponse from phone".into())
            }
        }
    }

    /// Serialize a ChannelOpenResponse with `status` into a frame addressed
    /// to `channel_id`.
    pub fn open_response_frame(channel_id: ChannelId, status: Wifi::status::Enum) -> Frame {
        let mut resp = Wifi::ChannelOpenResponse::new();
        resp.set_status(status);
        let mut data = Vec::new();
        let t = (Wifi::CommonMessage::CHANNEL_OPEN_RESPONSE as u16).to_be_bytes();
        data.extend_from_slice(&t);
        data.extend_from_slice(&resp.write_to_bytes().unwrap());
        Frame::new_common_response(channel_id, data)
    }
}
+50
View File
@@ -0,0 +1,50 @@
//! Head unit configuration.
/// Configuration for the Android Auto head unit identity.
///
/// All fields are reported to the phone verbatim in the
/// ServiceDiscoveryResponse (see the control-channel builder).
#[derive(Debug, Clone)]
pub struct HeadUnitConfig {
    // Display name of the head unit.
    pub name: String,
    // Vehicle model string reported to the phone.
    pub car_model: String,
    // Vehicle model year, as a string.
    pub car_year: String,
    // Vehicle/unit serial number.
    pub car_serial: String,
    // true = left-hand-drive vehicle.
    pub left_hand: bool,
    // Head unit manufacturer string.
    pub manufacturer: String,
    // Head unit model string.
    pub model: String,
    // Software build identifier.
    pub sw_build: String,
    // Software version string.
    pub sw_version: String,
}
impl Default for HeadUnitConfig {
fn default() -> Self {
Self {
name: "ESP32 Android Auto".into(),
car_model: "WT32-SC01 Plus".into(),
car_year: "2024".into(),
car_serial: "ESP32S3-001".into(),
left_hand: true,
manufacturer: "ESP32 DIY".into(),
model: "NavHUD-1".into(),
sw_build: "1".into(),
sw_version: "0.1.0".into(),
}
}
}
/// WiFi AP configuration for the Android Auto TCP connection.
#[derive(Debug, Clone)]
pub struct WifiConfig {
    // SSID of the soft-AP the phone joins.
    pub ssid: String,
    // WPA passphrase for the soft-AP.
    pub password: String,
    /// TCP port to listen on for the Android Auto data connection.
    pub listen_port: u16,
}
impl Default for WifiConfig {
fn default() -> Self {
Self {
ssid: "ESP32-AA-HU".into(),
password: "androidauto123".into(),
listen_port: 5277,
}
}
}
+223
View File
@@ -0,0 +1,223 @@
//! Control channel handler for Android Auto.
//!
//! Handles: version handshake, TLS negotiation, service discovery,
//! ping/pong, audio focus, navigation focus, shutdown.
use protobuf::{Enum, Message};
use crate::frame::{self, ChannelId, Frame, FrameHeaderContents, FrameHeaderType, FrameHeader, VERSION};
use crate::proto::Wifi;
/// Parsed control channel messages.
///
/// One variant per `Wifi::ControlMessage` wire type handled by
/// `try_from_frame`; response variants also cover the outgoing direction.
#[derive(Debug)]
pub enum ControlMessage {
    VersionRequest,
    /// Parsed from raw big-endian u16 fields (not protobuf); `status` is 0
    /// when the optional field is absent.
    VersionResponse { major: u16, minor: u16, status: u16 },
    /// Raw TLS record bytes exchanged during the handshake.
    SslHandshake(Vec<u8>),
    SslAuthComplete(bool),
    ServiceDiscoveryRequest(Wifi::ServiceDiscoveryRequest),
    ServiceDiscoveryResponse(Wifi::ServiceDiscoveryResponse),
    AudioFocusRequest(Wifi::AudioFocusRequest),
    AudioFocusResponse(Wifi::AudioFocusResponse),
    PingRequest(Wifi::PingRequest),
    PingResponse(Wifi::PingResponse),
    ShutdownRequest(Wifi::ShutdownRequest),
    ShutdownResponse,
    NavigationFocusRequest(Wifi::NavigationFocusRequest),
    NavigationFocusResponse(Wifi::NavigationFocusResponse),
    VoiceSession(Wifi::VoiceSessionRequest),
}
impl ControlMessage {
    /// Try to parse a control message from a frame.
    ///
    /// Returns an error — never panics — on truncated frames, unknown
    /// message types, or malformed protobuf payloads.
    pub fn try_from_frame(frame: &Frame) -> Result<Self, String> {
        if frame.header.frame.get_control() {
            return Err("This is a common/specific message, not control".into());
        }
        // The payload starts with a 2-byte big-endian message-type prefix.
        // The previous code sliced `frame.data[0..2]` unconditionally and
        // panicked on a truncated frame.
        let Some(ty_bytes) = frame.data.get(0..2) else {
            return Err(format!("Control frame too short: {} bytes", frame.data.len()));
        };
        let ty = u16::from_be_bytes([ty_bytes[0], ty_bytes[1]]);
        let Some(msg_type) = Wifi::ControlMessage::from_i32(ty as i32) else {
            return Err(format!("Unknown control message 0x{:x}", ty));
        };
        match msg_type {
            Wifi::ControlMessage::VERSION_RESPONSE => {
                // Raw (non-protobuf) layout: u16 major, u16 minor, optional
                // u16 status, all big-endian after the type prefix.
                if frame.data.len() >= 6 {
                    let major = u16::from_be_bytes([frame.data[2], frame.data[3]]);
                    let minor = u16::from_be_bytes([frame.data[4], frame.data[5]]);
                    // Status field is optional — absent means OK (0)
                    let status = if frame.data.len() >= 8 {
                        u16::from_be_bytes([frame.data[6], frame.data[7]])
                    } else {
                        0
                    };
                    Ok(Self::VersionResponse { major, minor, status })
                } else {
                    Err(format!("Invalid version response length: {}", frame.data.len()))
                }
            }
            Wifi::ControlMessage::SSL_HANDSHAKE => {
                Ok(Self::SslHandshake(frame.data[2..].to_vec()))
            }
            Wifi::ControlMessage::SERVICE_DISCOVERY_REQUEST => {
                let m = Wifi::ServiceDiscoveryRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid ServiceDiscoveryRequest: {}", e))?;
                Ok(Self::ServiceDiscoveryRequest(m))
            }
            Wifi::ControlMessage::PING_REQUEST => {
                let m = Wifi::PingRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid PingRequest: {}", e))?;
                Ok(Self::PingRequest(m))
            }
            Wifi::ControlMessage::PING_RESPONSE => {
                let m = Wifi::PingResponse::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid PingResponse: {}", e))?;
                Ok(Self::PingResponse(m))
            }
            Wifi::ControlMessage::AUDIO_FOCUS_REQUEST => {
                let m = Wifi::AudioFocusRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid AudioFocusRequest: {}", e))?;
                Ok(Self::AudioFocusRequest(m))
            }
            Wifi::ControlMessage::SHUTDOWN_REQUEST => {
                let m = Wifi::ShutdownRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid ShutdownRequest: {}", e))?;
                Ok(Self::ShutdownRequest(m))
            }
            Wifi::ControlMessage::NAVIGATION_FOCUS_REQUEST => {
                let m = Wifi::NavigationFocusRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid NavigationFocusRequest: {}", e))?;
                Ok(Self::NavigationFocusRequest(m))
            }
            Wifi::ControlMessage::VOICE_SESSION_REQUEST => {
                let m = Wifi::VoiceSessionRequest::parse_from_bytes(&frame.data[2..])
                    .map_err(|e| format!("Invalid VoiceSessionRequest: {}", e))?;
                Ok(Self::VoiceSession(m))
            }
            _ => Err(format!("Unhandled control message 0x{:x}", ty as u32)),
        }
    }
}
// ---------------------------------------------------------------------------
// Frame builders for outgoing control messages
// ---------------------------------------------------------------------------
/// Build the VersionRequest frame (our major/minor from `frame::VERSION`).
pub fn version_request_frame() -> Frame {
    let mut payload = Vec::with_capacity(6);
    payload.extend_from_slice(&(Wifi::ControlMessage::VERSION_REQUEST as u16).to_be_bytes());
    payload.extend_from_slice(&VERSION.0.to_be_bytes());
    payload.extend_from_slice(&VERSION.1.to_be_bytes());
    Frame::new_control_unencrypted(payload)
}
/// Build an SSL handshake frame wrapping raw TLS record bytes.
pub fn ssl_handshake_frame(tls_data: Vec<u8>) -> Frame {
    let mut payload = (Wifi::ControlMessage::SSL_HANDSHAKE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&tls_data);
    Frame::new_control_unencrypted(payload)
}
/// Build an SslAuthComplete frame reporting handshake success/failure.
pub fn ssl_auth_complete_frame(success: bool) -> Frame {
    let mut indication = Wifi::AuthCompleteIndication::new();
    indication.set_status(if success {
        Wifi::AuthCompleteIndicationStatus::OK
    } else {
        Wifi::AuthCompleteIndicationStatus::FAIL
    });

    let mut payload = (Wifi::ControlMessage::AUTH_COMPLETE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&indication.write_to_bytes().unwrap());
    Frame::new_control_unencrypted(payload)
}
/// Build a PingResponse frame.
///
/// NOTE(review): echoes `timestamp + 1`, matching the original
/// implementation — confirm against the upstream protocol whether the
/// increment is required or the timestamp should be echoed verbatim.
pub fn ping_response_frame(timestamp: i64) -> Frame {
    let mut pong = Wifi::PingResponse::new();
    pong.set_timestamp(timestamp + 1);

    let mut payload = (Wifi::ControlMessage::PING_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&pong.write_to_bytes().unwrap());
    Frame::new_control_unencrypted(payload)
}
/// Build an AudioFocusResponse frame granting whatever focus the phone
/// requested (navigation-only head unit: no local audio to arbitrate).
pub fn audio_focus_response_frame(request: &Wifi::AudioFocusRequest) -> Frame {
    use Wifi::audio_focus_state::Enum as State;
    use Wifi::audio_focus_type::Enum as FocusType;

    // Map the requested focus type onto the state we report back.
    let state = if request.has_audio_focus_type() {
        match request.audio_focus_type() {
            FocusType::NONE => State::NONE,
            FocusType::GAIN => State::GAIN,
            FocusType::GAIN_TRANSIENT => State::GAIN_TRANSIENT,
            FocusType::GAIN_NAVI => State::GAIN,
            FocusType::RELEASE => State::LOSS,
        }
    } else {
        State::NONE
    };

    let mut response = Wifi::AudioFocusResponse::new();
    response.set_audio_focus_state(state);

    let mut payload = (Wifi::ControlMessage::AUDIO_FOCUS_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_control_encrypted(payload)
}
/// Build a NavigationFocusResponse frame that accepts navigation focus.
pub fn nav_focus_response_frame() -> Frame {
    let mut response = Wifi::NavigationFocusResponse::new();
    response.set_type(2); // Accept navigation focus

    let mut payload = (Wifi::ControlMessage::NAVIGATION_FOCUS_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_control_encrypted(payload)
}
/// Build a ShutdownResponse frame (empty protobuf body, just acknowledges).
pub fn shutdown_response_frame() -> Frame {
    let response = Wifi::ShutdownResponse::new();
    let mut payload = (Wifi::ControlMessage::SHUTDOWN_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_control_encrypted(payload)
}
/// Build the ServiceDiscoveryResponse frame with our identity and the
/// advertised channel list.
pub fn service_discovery_response_frame(
    config: &crate::config::HeadUnitConfig,
    channels: &[Wifi::ChannelDescriptor],
) -> Frame {
    let mut response = Wifi::ServiceDiscoveryResponse::new();
    // Head unit identity fields, copied verbatim from the config.
    response.set_head_unit_name(config.name.clone());
    response.set_car_model(config.car_model.clone());
    response.set_car_year(config.car_year.clone());
    response.set_car_serial(config.car_serial.clone());
    response.set_left_hand_drive_vehicle(config.left_hand);
    response.set_headunit_manufacturer(config.manufacturer.clone());
    response.set_headunit_model(config.model.clone());
    response.set_sw_build(config.sw_build.clone());
    response.set_sw_version(config.sw_version.clone());
    response.set_can_play_native_media_during_vr(false);
    // Advertise all channels in one go instead of a push-per-element loop.
    response.channels.extend_from_slice(channels);

    let mut payload = (Wifi::ControlMessage::SERVICE_DISCOVERY_RESPONSE as u16)
        .to_be_bytes()
        .to_vec();
    payload.extend_from_slice(&response.write_to_bytes().unwrap());
    Frame::new_control_encrypted(payload)
}
+446
View File
@@ -0,0 +1,446 @@
//! H.264 software decoder using Espressif's esp_h264 component.
//!
//! Decodes H.264 NAL units from Android Auto's video channel into
//! RGB565 framebuffer data suitable for the ST7796 display.
//!
//! Pipeline: H.264 NAL → esp_h264 decoder → I420 (YUV) → downscale → RGB565
//!
//! Performance on ESP32-S3 (dual-task decoder):
//! 320×192 → ~27 fps
//! 640×480 → ~11 fps
//!
//! Android Auto sends 800×480 minimum, so we decode and downscale to
//! fit the 480×320 display.
use anyhow::{Result, bail, Context};
/// Display dimensions (WT32-SC01 Plus: ST7796 480×320)
pub const DISPLAY_WIDTH: u32 = 480;
pub const DISPLAY_HEIGHT: u32 = 320;
/// Configuration for the video decoder.
///
/// Source dimensions come from the Android Auto video stream; target
/// dimensions are the physical display size the decoded frame is scaled to.
#[derive(Debug, Clone)]
pub struct DecoderConfig {
    /// Source width from Android Auto (e.g., 800)
    pub source_width: u32,
    /// Source height from Android Auto (e.g., 480)
    pub source_height: u32,
    /// Target width for display output
    pub target_width: u32,
    /// Target height for display output
    pub target_height: u32,
}
impl Default for DecoderConfig {
fn default() -> Self {
Self {
source_width: 800,
source_height: 480,
target_width: DISPLAY_WIDTH,
target_height: DISPLAY_HEIGHT,
}
}
}
/// H.264 decoder state.
///
/// Wraps the esp_h264 C decoder API via unsafe FFI.
/// All large buffers are allocated in PSRAM (8MB) to avoid internal SRAM OOM.
pub struct H264Decoder {
    // Source/target dimensions used for buffer sizing and scaling.
    config: DecoderConfig,
    /// Decoded I420 buffer (Y + U + V planes) — in PSRAM
    i420_buf: PsramBuf,
    /// Output RGB565 framebuffer (target dimensions) — in PSRAM
    rgb565_buf: PsramBuf,
    /// NAL unit accumulator (H.264 frames arrive in chunks) — in PSRAM
    nal_buf: PsramBuf,
    /// Current write position in nal_buf
    nal_len: usize,
    /// Total frames decoded (for stats)
    frames_decoded: u64,
}
/// A buffer allocated in PSRAM via heap_caps_malloc.
///
/// Owns the raw allocation; freed in `Drop`. Not `Send`/`Sync`-audited —
/// NOTE(review): confirm single-task use before sharing across FreeRTOS
/// tasks.
struct PsramBuf {
    // Base pointer returned by heap_caps_malloc (never null after `new`).
    ptr: *mut u8,
    // Allocation size in bytes.
    capacity: usize,
}
impl PsramBuf {
    /// Allocate a zero-initialized buffer of `size` bytes in external PSRAM.
    ///
    /// # Errors
    /// Fails (with heap statistics in the message) when the allocation
    /// cannot be satisfied.
    fn new(size: usize) -> Result<Self> {
        let ptr = unsafe {
            esp_idf_sys::heap_caps_malloc(size, esp_idf_sys::MALLOC_CAP_SPIRAM)
        };
        if ptr.is_null() {
            let free = unsafe { esp_idf_sys::esp_get_free_heap_size() };
            bail!(
                "PSRAM alloc failed: {} KB requested, {} KB free heap",
                size / 1024,
                free / 1024,
            );
        }
        // SAFETY: ptr is non-null and points to `size` freshly allocated bytes.
        unsafe { std::ptr::write_bytes(ptr as *mut u8, 0, size); }
        Ok(Self { ptr: ptr as *mut u8, capacity: size })
    }

    /// Borrow up to `len` bytes (clamped to capacity).
    fn as_slice(&self, len: usize) -> &[u8] {
        // SAFETY: ptr is valid for `capacity` bytes; len is clamped.
        unsafe { std::slice::from_raw_parts(self.ptr, len.min(self.capacity)) }
    }

    /// Borrow the whole buffer mutably.
    fn as_mut_slice(&mut self) -> &mut [u8] {
        // SAFETY: ptr is valid for `capacity` bytes; &mut self gives
        // exclusive access.
        unsafe { std::slice::from_raw_parts_mut(self.ptr, self.capacity) }
    }

    /// Borrow up to `pixel_count` u16 values.
    ///
    /// Clamped to capacity/2 — previously this trusted the caller and could
    /// hand out a view past the end of the allocation. NOTE(review):
    /// heap_caps_malloc is assumed to return 2-byte-aligned pointers —
    /// confirm against the ESP-IDF allocator guarantees.
    fn as_u16_slice(&self, pixel_count: usize) -> &[u16] {
        let n = pixel_count.min(self.capacity / 2);
        // SAFETY: n * 2 <= capacity, so the view stays inside the allocation.
        unsafe { std::slice::from_raw_parts(self.ptr as *const u16, n) }
    }

    /// Borrow up to `pixel_count` u16 values mutably (clamped like
    /// `as_u16_slice`).
    fn as_u16_mut_slice(&mut self, pixel_count: usize) -> &mut [u16] {
        let n = pixel_count.min(self.capacity / 2);
        // SAFETY: n * 2 <= capacity; &mut self gives exclusive access.
        unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut u16, n) }
    }
}
impl Drop for PsramBuf {
    fn drop(&mut self) {
        // SAFETY: `ptr` came from heap_caps_malloc in `new` (non-null,
        // never reassigned) and is freed exactly once here.
        unsafe { esp_idf_sys::heap_caps_free(self.ptr as *mut std::ffi::c_void); }
    }
}
impl H264Decoder {
    /// Create a new decoder instance. All large buffers go to PSRAM.
    ///
    /// Allocates three PSRAM buffers:
    /// - I420: source_w × source_h × 1.5 bytes (Y + quarter-size U and V)
    /// - RGB565: target_w × target_h × 2 bytes
    /// - NAL accumulator: fixed 512 KB
    ///
    /// Returns an error if any PSRAM allocation fails.
    pub fn new(config: DecoderConfig) -> Result<Self> {
        let i420_size = (config.source_width * config.source_height * 3 / 2) as usize;
        let rgb565_size = (config.target_width * config.target_height * 2) as usize;
        let nal_capacity = 512 * 1024; // 512KB for keyframes
        log::info!(
            "H.264 decoder: {}×{} → {}×{} (I420: {}KB, RGB565: {}KB, NAL: {}KB) — all PSRAM",
            config.source_width, config.source_height,
            config.target_width, config.target_height,
            i420_size / 1024, rgb565_size / 1024, nal_capacity / 1024,
        );
        let i420_buf = PsramBuf::new(i420_size).context("allocating I420 buffer in PSRAM")?;
        let rgb565_buf = PsramBuf::new(rgb565_size).context("allocating RGB565 buffer in PSRAM")?;
        let nal_buf = PsramBuf::new(nal_capacity).context("allocating NAL buffer in PSRAM")?;
        // Log remaining PSRAM so OOM issues are diagnosable from the console.
        let free_psram = unsafe {
            esp_idf_sys::heap_caps_get_free_size(esp_idf_sys::MALLOC_CAP_SPIRAM)
        };
        log::info!("PSRAM free after decoder init: {} KB", free_psram / 1024);
        Ok(Self {
            config,
            i420_buf,
            rgb565_buf,
            nal_buf,
            nal_len: 0,
            frames_decoded: 0,
        })
    }
    /// Feed H.264 data (NAL units) from Android Auto.
    /// Returns Some(rgb565 framebuffer slice) when a complete frame is decoded.
    ///
    /// NOTE(review): real decoding is still a TODO — the esp_h264 bindings
    /// are not wired up yet. Currently this accumulates NAL bytes and, once
    /// 32 KB have arrived, resets the accumulator and emits a solid-color
    /// test pattern (cycling green/blue/red/white) to exercise the display
    /// path. An overflow of the 512 KB accumulator discards the buffered
    /// data and returns Ok(None).
    pub fn decode(&mut self, h264_data: &[u8]) -> Result<Option<&[u16]>> {
        // Accumulate NAL data into PSRAM buffer
        let new_len = self.nal_len + h264_data.len();
        if new_len > self.nal_buf.capacity {
            log::warn!("NAL overflow, discarding {} bytes", self.nal_len);
            self.nal_len = 0;
            return Ok(None);
        }
        // SAFETY (review note): new_len <= capacity was checked above, so the
        // copy stays inside the allocation; source/destination cannot overlap
        // (caller slice vs. PSRAM buffer).
        unsafe {
            std::ptr::copy_nonoverlapping(
                h264_data.as_ptr(),
                self.nal_buf.ptr.add(self.nal_len),
                h264_data.len(),
            );
        }
        self.nal_len = new_len;
        // TODO: Feed to esp_h264 decoder (bindings not yet generated)
        // Generate a test pattern frame periodically to verify display pipeline
        if self.nal_len > 32 * 1024 {
            self.nal_len = 0;
            self.frames_decoded += 1;
            // Generate alternating color test pattern to prove display works
            let pixels = self.config.target_width * self.config.target_height;
            let rgb565 = self.rgb565_buf.as_u16_mut_slice(pixels as usize);
            let color = match self.frames_decoded % 4 {
                0 => 0x07E0u16, // Green
                1 => 0x001Fu16, // Blue
                2 => 0xF800u16, // Red
                _ => 0xFFFFu16, // White
            };
            for px in rgb565.iter_mut() {
                *px = color;
            }
            log::info!(
                "🎨 Test frame #{} (color=0x{:04X}, {}x{})",
                self.frames_decoded, color,
                self.config.target_width, self.config.target_height
            );
            return Ok(Some(self.rgb565_buf.as_u16_slice(pixels as usize)));
        }
        Ok(None)
    }
    /// Get the number of frames decoded so far.
    pub fn frames_decoded(&self) -> u64 {
        self.frames_decoded
    }
    /// Get the current NAL accumulator length.
    pub fn nal_len(&self) -> usize {
        self.nal_len
    }
    /// Get the output framebuffer dimensions.
    pub fn output_dimensions(&self) -> (u32, u32) {
        (self.config.target_width, self.config.target_height)
    }
}
impl Drop for H264Decoder {
    fn drop(&mut self) {
        // PSRAM buffers free themselves via PsramBuf::drop; only the (not
        // yet created) esp_h264 handle would need explicit teardown here.
        // TODO: esp_h264_dec_close(self.decoder_handle);
        log::info!(
            "H.264 decoder closed after {} frames",
            self.frames_decoded
        );
    }
}
// ---------------------------------------------------------------------------
// I420 → RGB565 conversion with nearest-neighbor downscaling
// ---------------------------------------------------------------------------
/// Convert I420 (YUV 4:2:0 planar) to RGB565 with nearest-neighbor downscaling.
///
/// I420 layout:
///   Y plane: src_w × src_h bytes (one byte per pixel)
///   U plane: (src_w/2) × (src_h/2) bytes
///   V plane: (src_w/2) × (src_h/2) bytes
///
/// RGB565 layout: 16-bit per pixel, [RRRRRGGG_GGGBBBBB]
///
/// Each output pixel is mapped to the nearest input pixel via integer
/// division — fast and sufficient for a 480×320 embedded display.
/// YUV→RGB uses integer BT.601 coefficients (×256 fixed point).
pub fn i420_to_rgb565_downscale(
    i420: &[u8],
    src_w: u32,
    src_h: u32,
    rgb565: &mut [u16],
    dst_w: u32,
    dst_h: u32,
) {
    // Split the planar layout: Y, then quarter-size U, then V to the end.
    let y_size = (src_w * src_h) as usize;
    let chroma_size = y_size / 4;
    let (y_plane, chroma) = i420.split_at(y_size);
    let (u_plane, v_plane) = chroma.split_at(chroma_size);
    let uv_stride = (src_w / 2) as usize;

    for out_y in 0..dst_h {
        // Nearest source row for this output row.
        let in_y = out_y * src_h / dst_h;
        let y_row = (in_y * src_w) as usize;
        let uv_row = (in_y / 2) as usize * uv_stride;
        let out_row = (out_y * dst_w) as usize;
        for out_x in 0..dst_w {
            // Nearest source column for this output column.
            let in_x = (out_x * src_w / dst_w) as usize;
            let luma = y_plane[y_row + in_x] as i32;
            let cb = u_plane[uv_row + in_x / 2] as i32 - 128;
            let cr = v_plane[uv_row + in_x / 2] as i32 - 128;
            // BT.601 integer math: R = Y + 1.371·V, G = Y − 0.699·V − 0.336·U,
            // B = Y + 1.732·U (all scaled by 256).
            let r = (luma + ((351 * cr) >> 8)).clamp(0, 255) as u16;
            let g = (luma - ((179 * cr + 86 * cb) >> 8)).clamp(0, 255) as u16;
            let b = (luma + ((443 * cb) >> 8)).clamp(0, 255) as u16;
            // Pack RGB565: RRRRR_GGGGGG_BBBBB
            rgb565[out_row + out_x as usize] = ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3);
        }
    }
}
/// Convert an I420 buffer to RGB565 at identical source/target dimensions.
///
/// Thin wrapper over [`i420_to_rgb565_downscale`] with destination size
/// equal to the source size (1:1 copy, no scaling).
pub fn i420_to_rgb565(
    i420: &[u8],
    width: u32,
    height: u32,
    rgb565: &mut [u16],
) {
    i420_to_rgb565_downscale(i420, width, height, rgb565, width, height)
}
// ---------------------------------------------------------------------------
// Bilinear downscaling (higher quality, more CPU)
// ---------------------------------------------------------------------------
/// Convert I420 to RGB565 with bilinear interpolation downscaling.
///
/// Higher quality than nearest-neighbor but ~4× slower; use only if the
/// ESP32-S3 has CPU headroom. Luma is blended from four taps with 8.8
/// fixed-point weights; chroma stays nearest-neighbor.
pub fn i420_to_rgb565_bilinear(
    i420: &[u8],
    src_w: u32,
    src_h: u32,
    rgb565: &mut [u16],
    dst_w: u32,
    dst_h: u32,
) {
    let y_size = (src_w * src_h) as usize;
    let uv_size = (src_w * src_h / 4) as usize;
    let y_plane = &i420[..y_size];
    let u_plane = &i420[y_size..y_size + uv_size];
    let v_plane = &i420[y_size + uv_size..];
    let uv_stride = (src_w / 2) as usize;
    let stride = src_w as usize;

    for dy in 0..dst_h {
        // 8.8 fixed-point vertical source coordinate.
        let fy = (dy * ((src_h - 1) << 8)) / dst_h.max(1);
        let y0 = (fy >> 8) as usize;
        let wy = fy & 0xFF;
        let y1 = (y0 + 1).min(src_h as usize - 1);
        let row_out = (dy * dst_w) as usize;
        for dx in 0..dst_w {
            // 8.8 fixed-point horizontal source coordinate.
            let fx = (dx * ((src_w - 1) << 8)) / dst_w.max(1);
            let x0 = (fx >> 8) as usize;
            let wx = fx & 0xFF;
            let x1 = (x0 + 1).min(src_w as usize - 1);
            // Four luma taps, blended horizontally then vertically; the
            // +32768 rounds the final 16-bit fixed-point value.
            let top = y_plane[y0 * stride + x0] as u32 * (256 - wx)
                + y_plane[y0 * stride + x1] as u32 * wx;
            let bot = y_plane[y1 * stride + x0] as u32 * (256 - wx)
                + y_plane[y1 * stride + x1] as u32 * wx;
            let luma = ((top * (256 - wy) + bot * wy + 32768) >> 16) as i32;
            // UV at half resolution — nearest neighbor is fine for chroma.
            let ci = (y0 / 2) * uv_stride + x0 / 2;
            let cu = u_plane[ci] as i32 - 128;
            let cv = v_plane[ci] as i32 - 128;
            // Integer BT.601 (×256 fixed-point coefficients).
            let r = (luma + ((351 * cv) >> 8)).clamp(0, 255) as u16;
            let g = (luma - ((179 * cv + 86 * cu) >> 8)).clamp(0, 255) as u16;
            let b = (luma + ((443 * cu) >> 8)).clamp(0, 255) as u16;
            rgb565[row_out + dx as usize] = ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Build an I420 buffer with constant luma and neutral (128) chroma.
    fn flat_i420(w: u32, h: u32, luma: u8) -> Vec<u8> {
        let y_size = (w * h) as usize;
        let mut buf = vec![luma; y_size];
        // U and V planes are each a quarter of the Y plane.
        buf.resize(y_size + y_size / 2, 128);
        buf
    }

    #[test]
    fn test_i420_to_rgb565_black_frame() {
        // Pure black in I420: Y=0, U=128, V=128 → RGB565 0x0000.
        let (w, h) = (4u32, 4u32);
        let i420 = flat_i420(w, h, 0);
        let mut rgb565 = vec![0u16; (w * h) as usize];
        i420_to_rgb565(&i420, w, h, &mut rgb565);
        for &pixel in &rgb565 {
            assert_eq!(pixel, 0x0000, "Expected black pixel");
        }
    }

    #[test]
    fn test_i420_to_rgb565_white_frame() {
        // Pure white in I420: Y=255, U=128, V=128 → RGB565 0xFFFF.
        let (w, h) = (4u32, 4u32);
        let i420 = flat_i420(w, h, 255);
        let mut rgb565 = vec![0u16; (w * h) as usize];
        i420_to_rgb565(&i420, w, h, &mut rgb565);
        for &pixel in &rgb565 {
            assert_eq!(pixel, 0xFFFF, "Expected white pixel");
        }
    }

    #[test]
    fn test_downscale_dimensions() {
        // A uniform mid-gray 8×8 frame must stay uniform after 4×4 downscale.
        let (src_w, src_h, dst_w, dst_h) = (8u32, 8u32, 4u32, 4u32);
        let i420 = flat_i420(src_w, src_h, 128);
        let mut rgb565 = vec![0u16; (dst_w * dst_h) as usize];
        i420_to_rgb565_downscale(&i420, src_w, src_h, &mut rgb565, dst_w, dst_h);
        let first = rgb565[0];
        for &pixel in &rgb565 {
            assert_eq!(pixel, first, "All pixels should be same gray");
        }
    }

    #[test]
    fn test_800x480_to_480x320_downscale() {
        // Android Auto resolution → WT32-SC01 Plus display resolution.
        let (src_w, src_h, dst_w, dst_h) = (800u32, 480u32, 480u32, 320u32);
        let i420 = flat_i420(src_w, src_h, 200);
        let mut rgb565 = vec![0u16; (dst_w * dst_h) as usize];
        i420_to_rgb565_downscale(&i420, src_w, src_h, &mut rgb565, dst_w, dst_h);
        // Should produce 480×320 = 153600 pixels, all visibly non-black.
        assert_eq!(rgb565.len(), 153600);
        assert!(rgb565.iter().all(|&p| p != 0));
    }
}
+350
View File
@@ -0,0 +1,350 @@
//! Minimal ST7796 display driver for WT32-SC01 Plus.
//!
//! Simple single-buffer approach: write RGB565 pixel data directly
//! to the display via I80 8-bit parallel bus + DMA.
//! No double-buffering, no VSYNC sync — just raw pixel pushing.
use std::ffi::c_void;
use std::ptr;
use anyhow::{Context, Result, bail};
use esp_idf_hal::gpio::{Output, PinDriver};
use esp_idf_sys as sys;
/// Display dimensions (landscape)
pub const DISPLAY_WIDTH: u16 = 480;
pub const DISPLAY_HEIGHT: u16 = 320;
/// Total pixel count of one full frame (480 × 320 = 153 600).
pub const DISPLAY_PIXELS: usize = DISPLAY_WIDTH as usize * DISPLAY_HEIGHT as usize;
/// Height of each DMA strip (lines). Tradeoff: larger = fewer DMA calls, more SRAM.
/// 20 lines × 480 × 2 = 19,200 bytes of internal SRAM.
const STRIP_LINES: usize = 20;
/// Byte size of one strip (RGB565 = 2 bytes per pixel).
const STRIP_BYTES: usize = DISPLAY_WIDTH as usize * STRIP_LINES * 2;
// WT32-SC01 Plus GPIO pin assignments
// D0..D7 form the 8-bit I80 parallel data bus (wired into data_gpio_nums
// in init_i80_bus below).
const PIN_D0: i32 = 9;
const PIN_D1: i32 = 46;
const PIN_D2: i32 = 3;
const PIN_D3: i32 = 8;
const PIN_D4: i32 = 18;
const PIN_D5: i32 = 17;
const PIN_D6: i32 = 16;
const PIN_D7: i32 = 15;
// Write strobe and data/command select for the I80 bus.
const PIN_WR: i32 = 47;
const PIN_DC: i32 = 0;
// NOTE(review): PIN_RST and PIN_BACKLIGHT are not referenced in this module —
// reset/backlight pins are passed into Display::init as pin drivers instead.
// Confirm these constants match the pins actually handed in.
const PIN_RST: i32 = 4;
const PIN_BACKLIGHT: i32 = 45;
/// Simple display handle.
///
/// Owns the esp_lcd panel/IO handles and a DMA staging buffer; the staging
/// buffer is released in `Drop`.
pub struct Display {
    // Panel object (created in init_panel); kept alive for the display's
    // lifetime. Drawing goes through io_handle directly.
    panel_handle: sys::esp_lcd_panel_handle_t,
    // Panel-IO object used for all command and pixel transfers.
    io_handle: sys::esp_lcd_panel_io_handle_t,
    /// DMA staging buffer in internal SRAM (NOT PSRAM).
    /// PSRAM data can't be DMA'd directly on ESP32-S3 I80 bus reliably.
    /// We copy strip-by-strip: PSRAM → stage → DMA → display.
    dma_stage: *mut u8,
}
// SAFETY: The display handles are used from a single thread (video_display_loop),
// so the raw pointers are never accessed concurrently and moving the Display
// across threads is sound. NOTE(review): nothing enforces the single-thread
// invariant — revisit if a second thread ever touches the display.
unsafe impl Send for Display {}
impl Display {
    /// Initialize the ST7796 display on the I80 bus.
    ///
    /// Sequence: GPIO hardware reset → backlight on → I80 bus → panel IO →
    /// panel + vendor init commands → DMA staging buffer allocation.
    ///
    /// # Errors
    /// Fails if any esp_lcd object cannot be created or the DMA staging
    /// buffer cannot be allocated in internal SRAM.
    pub fn init(
        reset_pin: impl esp_idf_hal::gpio::OutputPin + 'static,
        backlight_pin: impl esp_idf_hal::gpio::OutputPin + 'static,
    ) -> Result<Self> {
        unsafe {
            // Hardware reset: high → low → high pulse with generous settle delays.
            let mut reset_driver = PinDriver::output(reset_pin)?;
            reset_driver.set_high()?;
            std::thread::sleep(std::time::Duration::from_millis(100));
            reset_driver.set_low()?;
            std::thread::sleep(std::time::Duration::from_millis(100));
            reset_driver.set_high()?;
            std::thread::sleep(std::time::Duration::from_millis(120));
            // Leak the driver so its Drop never runs and the pin keeps its
            // level (presumably Drop would reconfigure the GPIO — confirm
            // against esp-idf-hal).
            std::mem::forget(reset_driver);
            // Backlight ON (simple GPIO high, no PWM)
            let mut bl_pin = PinDriver::output(backlight_pin)?;
            bl_pin.set_high()?;
            std::mem::forget(bl_pin);
            // Create I80 bus
            let bus_handle = Self::init_i80_bus()?;
            // Create panel IO
            let io_handle = Self::init_panel_io(bus_handle)?;
            // Create and initialize ST7796 panel
            let panel_handle = Self::init_panel(io_handle)?;
            // Allocate DMA staging buffer in internal SRAM
            let dma_stage = sys::heap_caps_malloc(
                STRIP_BYTES,
                sys::MALLOC_CAP_DMA | sys::MALLOC_CAP_INTERNAL,
            ) as *mut u8;
            if dma_stage.is_null() {
                bail!("Failed to allocate {}B DMA stage buffer in internal SRAM", STRIP_BYTES);
            }
            log::info!("ST7796 display initialized ({}×{}, DMA stage {}B)", DISPLAY_WIDTH, DISPLAY_HEIGHT, STRIP_BYTES);
            Ok(Self { panel_handle, io_handle, dma_stage })
        }
    }

    /// Write a full-screen RGB565 framebuffer to the display.
    ///
    /// `data` must be exactly DISPLAY_WIDTH × DISPLAY_HEIGHT × 2 bytes.
    /// Copies through internal SRAM staging buffer for DMA compatibility
    /// (PSRAM data can't be DMA'd reliably on the I80 bus).
    pub fn draw_rgb565(&self, data: &[u8]) {
        debug_assert_eq!(data.len(), DISPLAY_PIXELS * 2);
        // Push the frame in STRIP_LINES-high bands; each band fits dma_stage.
        for y in (0..DISPLAY_HEIGHT as usize).step_by(STRIP_LINES) {
            // Last band may be shorter if STRIP_LINES doesn't divide the height.
            let h = STRIP_LINES.min(DISPLAY_HEIGHT as usize - y);
            let offset = y * DISPLAY_WIDTH as usize * 2;
            let len = DISPLAY_WIDTH as usize * h * 2;
            // Copy from PSRAM → internal SRAM staging buffer
            unsafe {
                ptr::copy_nonoverlapping(
                    data[offset..].as_ptr(),
                    self.dma_stage,
                    len,
                );
                Self::flush_pixels(
                    self.io_handle,
                    0, y as u16,
                    DISPLAY_WIDTH, (y + h) as u16,
                    self.dma_stage,
                    len,
                );
            }
        }
    }

    /// Write a rectangular region of RGB565 pixels.
    ///
    /// NOTE(review): unlike draw_rgb565, `data` is handed to the DMA engine
    /// directly without going through `dma_stage` — callers must not pass
    /// PSRAM-backed buffers here. Also, w == 0 or h == 0 would underflow the
    /// end-coordinate math inside flush_pixels; confirm callers never do that.
    pub fn draw_region(&self, x: u16, y: u16, w: u16, h: u16, data: &[u8]) {
        unsafe {
            Self::flush_pixels(
                self.io_handle,
                x, y,
                x + w, y + h,
                data.as_ptr(),
                data.len(),
            );
        }
    }

    /// Fill entire screen with a solid RGB565 color.
    pub fn fill_color(&self, color: u16) {
        // Fill the DMA staging buffer with the color
        let pixels = DISPLAY_WIDTH as usize * STRIP_LINES;
        unsafe {
            let stage_u16 = self.dma_stage as *mut u16;
            for i in 0..pixels {
                *stage_u16.add(i) = color;
            }
        }
        // Re-send the same staged strip for every band of the screen.
        for y in (0..DISPLAY_HEIGHT).step_by(STRIP_LINES) {
            let h = STRIP_LINES.min((DISPLAY_HEIGHT - y) as usize) as u16;
            let len = DISPLAY_WIDTH as usize * h as usize * 2;
            unsafe {
                Self::flush_pixels(
                    self.io_handle,
                    0, y,
                    DISPLAY_WIDTH, y + h,
                    self.dma_stage,
                    len,
                );
            }
        }
    }

    // --- Private initialization helpers ---

    /// Create the 8-bit I80 bus with the WT32-SC01 Plus data/WR/DC pins.
    unsafe fn init_i80_bus() -> Result<sys::esp_lcd_i80_bus_handle_t> {
        let bus_config = sys::esp_lcd_i80_bus_config_t {
            clk_src: sys::soc_periph_lcd_clk_src_t_LCD_CLK_SRC_PLL160M,
            dc_gpio_num: PIN_DC,
            wr_gpio_num: PIN_WR,
            data_gpio_nums: [
                PIN_D0, PIN_D1, PIN_D2, PIN_D3,
                PIN_D4, PIN_D5, PIN_D6, PIN_D7,
                -1, -1, -1, -1, -1, -1, -1, -1,
            ],
            bus_width: 8,
            max_transfer_bytes: STRIP_BYTES, // Must match strip size
            sram_trans_align: 64,
            __bindgen_anon_1: sys::esp_lcd_i80_bus_config_t__bindgen_ty_1 {
                psram_trans_align: 64,
            },
        };
        let mut bus_handle: sys::esp_lcd_i80_bus_handle_t = ptr::null_mut();
        let ret = sys::esp_lcd_new_i80_bus(&bus_config, &mut bus_handle);
        if ret != sys::ESP_OK {
            bail!("Failed to create I80 bus: 0x{:x}", ret);
        }
        Ok(bus_handle)
    }

    /// Create the panel-IO layer on top of the I80 bus (40 MHz pixel clock,
    /// 8-bit commands/parameters, byte-swapped RGB565).
    unsafe fn init_panel_io(
        bus_handle: sys::esp_lcd_i80_bus_handle_t,
    ) -> Result<sys::esp_lcd_panel_io_handle_t> {
        let mut io_config: sys::esp_lcd_panel_io_i80_config_t = std::mem::zeroed();
        io_config.cs_gpio_num = -1;
        io_config.pclk_hz = 40_000_000; // 40MHz
        io_config.trans_queue_depth = 10;
        io_config.lcd_cmd_bits = 8;
        io_config.lcd_param_bits = 8;
        io_config.on_color_trans_done = None;
        io_config.user_ctx = ptr::null_mut();
        // DC levels — critical for I80
        io_config.dc_levels.set_dc_idle_level(0);
        io_config.dc_levels.set_dc_cmd_level(0);
        io_config.dc_levels.set_dc_dummy_level(0);
        io_config.dc_levels.set_dc_data_level(1);
        // Swap bytes for RGB565 (ESP32 little-endian → display big-endian)
        io_config.flags.set_swap_color_bytes(1);
        let mut io_handle: sys::esp_lcd_panel_io_handle_t = ptr::null_mut();
        let ret = sys::esp_lcd_new_panel_io_i80(bus_handle, &io_config, &mut io_handle);
        if ret != sys::ESP_OK {
            bail!("Failed to create panel IO: 0x{:x}", ret);
        }
        Ok(io_handle)
    }

    /// Create the panel object and run the ST7796 vendor init sequence.
    unsafe fn init_panel(
        io_handle: sys::esp_lcd_panel_io_handle_t,
    ) -> Result<sys::esp_lcd_panel_handle_t> {
        let mut panel_config: sys::esp_lcd_panel_dev_config_t = std::mem::zeroed();
        panel_config.reset_gpio_num = -1; // Already reset via GPIO
        panel_config.bits_per_pixel = 16;
        panel_config.__bindgen_anon_1.rgb_ele_order =
            sys::lcd_rgb_element_order_t_LCD_RGB_ELEMENT_ORDER_RGB;
        // NOTE(review): this creates a generic st7789 panel driver; the
        // ST7796-specific setup is performed manually via the vendor commands
        // below — confirm this pairing is intentional.
        let mut panel_handle: sys::esp_lcd_panel_handle_t = ptr::null_mut();
        let ret = sys::esp_lcd_new_panel_st7789(io_handle, &panel_config, &mut panel_handle);
        if ret != sys::ESP_OK {
            bail!("Failed to create panel: 0x{:x}", ret);
        }
        // Software reset
        sys::esp_lcd_panel_reset(panel_handle);
        std::thread::sleep(std::time::Duration::from_millis(100));
        // ST7796 vendor init commands
        // Enable extension command 2
        Self::send_cmd(io_handle, 0xF0, &[0xC3]);
        Self::send_cmd(io_handle, 0xF0, &[0x96]);
        // Display function control
        Self::send_cmd(io_handle, 0xB4, &[0x01]);
        Self::send_cmd(io_handle, 0xB6, &[0x80, 0x22, 0x3B]);
        // Display output ctrl
        Self::send_cmd(io_handle, 0xE8, &[0x40, 0x8A, 0x00, 0x00, 0x29, 0x19, 0xA5, 0x33]);
        // Power control
        Self::send_cmd(io_handle, 0xC1, &[0x06]);
        Self::send_cmd(io_handle, 0xC2, &[0xA7]);
        Self::send_cmd(io_handle, 0xC5, &[0x18]);
        std::thread::sleep(std::time::Duration::from_millis(120));
        // Gamma correction
        Self::send_cmd(io_handle, 0xE0, &[
            0xF0, 0x09, 0x0B, 0x06, 0x04, 0x15, 0x2F,
            0x54, 0x42, 0x3C, 0x17, 0x14, 0x18, 0x1B,
        ]);
        Self::send_cmd(io_handle, 0xE1, &[
            0xE0, 0x09, 0x0B, 0x06, 0x04, 0x03, 0x2B,
            0x43, 0x42, 0x3B, 0x16, 0x14, 0x17, 0x1B,
        ]);
        std::thread::sleep(std::time::Duration::from_millis(120));
        // Disable extension command 2
        Self::send_cmd(io_handle, 0xF0, &[0x3C]);
        Self::send_cmd(io_handle, 0xF0, &[0x69]);
        // Exit sleep
        Self::send_cmd(io_handle, 0x11, &[]);
        std::thread::sleep(std::time::Duration::from_millis(130));
        // 16-bit RGB565 pixel format
        Self::send_cmd(io_handle, 0x3A, &[0x55]);
        // Display inversion on (required for ST7796 correct colors)
        sys::esp_lcd_panel_invert_color(panel_handle, true);
        // Landscape: MADCTL MV|MY = 0xA0
        sys::esp_lcd_panel_swap_xy(panel_handle, true);
        sys::esp_lcd_panel_mirror(panel_handle, false, true);
        sys::esp_lcd_panel_set_gap(panel_handle, 0, 0);
        // Idle off
        Self::send_cmd(io_handle, 0x38, &[]);
        // Display on
        sys::esp_lcd_panel_disp_on_off(panel_handle, true);
        std::thread::sleep(std::time::Duration::from_millis(20));
        Ok(panel_handle)
    }

    /// Send one command byte plus parameter bytes over the panel IO.
    /// NOTE(review): the esp_err_t return value is ignored here.
    unsafe fn send_cmd(io_handle: sys::esp_lcd_panel_io_handle_t, cmd: u32, data: &[u8]) {
        sys::esp_lcd_panel_io_tx_param(
            io_handle,
            cmd as i32,
            data.as_ptr() as *const c_void,
            data.len(),
        );
    }

    /// Set the CASET/RASET address window and stream pixel data with RAMWR.
    /// `x_end`/`y_end` are exclusive; the panel expects inclusive bounds,
    /// hence the `- 1` below (underflows if an end coordinate is 0).
    unsafe fn flush_pixels(
        io_handle: sys::esp_lcd_panel_io_handle_t,
        x_start: u16, y_start: u16,
        x_end: u16, y_end: u16,
        data: *const u8, data_len: usize,
    ) {
        // CASET (Column Address Set)
        let caset: [u8; 4] = [
            (x_start >> 8) as u8, (x_start & 0xFF) as u8,
            ((x_end - 1) >> 8) as u8, ((x_end - 1) & 0xFF) as u8,
        ];
        sys::esp_lcd_panel_io_tx_param(
            io_handle, 0x2A,
            caset.as_ptr() as *const c_void, 4,
        );
        // RASET (Row Address Set)
        let raset: [u8; 4] = [
            (y_start >> 8) as u8, (y_start & 0xFF) as u8,
            ((y_end - 1) >> 8) as u8, ((y_end - 1) & 0xFF) as u8,
        ];
        sys::esp_lcd_panel_io_tx_param(
            io_handle, 0x2B,
            raset.as_ptr() as *const c_void, 4,
        );
        // RAMWR + pixel data via DMA
        sys::esp_lcd_panel_io_tx_color(
            io_handle, 0x2C,
            data as *const c_void, data_len,
        );
    }
}
impl Drop for Display {
    /// Release the internal-SRAM DMA staging buffer.
    fn drop(&mut self) {
        if self.dma_stage.is_null() {
            return;
        }
        // SAFETY: dma_stage was allocated with heap_caps_malloc in init()
        // and is freed exactly once here.
        unsafe { sys::heap_caps_free(self.dma_stage as *mut c_void) };
    }
}
+712
View File
@@ -0,0 +1,712 @@
//! Android Auto frame protocol layer.
//!
//! This module implements the binary framing protocol used by Android Auto.
//! Rewritten from the upstream `android-auto` crate to use synchronous `std::io`
//! instead of `tokio::io`, making it compatible with ESP-IDF.
use std::io::{Read, Write};
use anyhow::{Context, Result, bail};
/// The Android Auto protocol version we support, as (major, minor).
pub const VERSION: (u16, u16) = (1, 1);
/// Maximum payload size for a single frame (0x4000 = 16 KiB).
pub const MAX_FRAME_DATA_SIZE: usize = 0x4000;
/// Channel identifier type — each channel gets a u8 ID.
pub type ChannelId = u8;
// ---------------------------------------------------------------------------
// Frame header
// ---------------------------------------------------------------------------
/// Position of a frame within a (possibly multi-frame) packet.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(u8)]
pub enum FrameHeaderType {
    /// Interior fragment of a multi-frame packet.
    Middle = 0,
    /// Opening fragment of a multi-frame packet.
    First = 1,
    /// Closing fragment of a multi-frame packet.
    Last = 2,
    /// Complete packet carried in one frame.
    Single = 3,
}

impl From<u8> for FrameHeaderType {
    /// Decode from a flags byte; only the low two bits carry the type.
    fn from(value: u8) -> Self {
        match value & 0b11 {
            0 => FrameHeaderType::Middle,
            1 => FrameHeaderType::First,
            2 => FrameHeaderType::Last,
            _ => FrameHeaderType::Single,
        }
    }
}

impl From<FrameHeaderType> for u8 {
    /// Encode as the raw discriminant (0..=3).
    fn from(value: FrameHeaderType) -> Self {
        value as u8
    }
}
#[allow(missing_docs)]
// Bit layout of the frame flags byte (second header byte on the wire):
//   bit 3 = encryption, bit 2 = control/common, bits 1..0 = frame type.
// NOTE(review): the generated `new` constructor takes fields in declaration
// order (encryption, frame_type, control) — keep call sites in sync.
mod frame_header_bits {
    bitfield::bitfield! {
        #[derive(Copy, Clone)]
        pub struct FrameHeaderContents(u8);
        impl Debug;
        impl new;
        u8;
        /// True indicates the frame is encrypted.
        pub get_encryption, set_encryption: 3;
        /// The frame header type.
        pub from into super::FrameHeaderType, get_frame_type, set_frame_type: 1, 0;
        /// True when frame is for control (common), false when channel-specific.
        pub get_control, set_control: 2;
    }
}
pub use frame_header_bits::FrameHeaderContents;
/// A frame header: channel id + flags byte.
#[derive(Copy, Clone, Debug)]
pub struct FrameHeader {
    pub channel_id: ChannelId,
    pub frame: FrameHeaderContents,
}

impl FrameHeader {
    /// Append the two-byte wire encoding (channel id, then flags) to `buf`.
    pub fn write_to(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(&[self.channel_id, self.frame.0]);
    }
}
// ---------------------------------------------------------------------------
// Frame
// ---------------------------------------------------------------------------
/// A reassembled Android Auto frame (multi-frame packets are combined).
#[derive(Debug)]
pub struct Frame {
    pub header: FrameHeader,
    pub data: Vec<u8>,
}

impl Frame {
    /// Shared constructor: a Single-type frame on `channel_id` with the
    /// given encryption / control-flag combination.
    fn single_frame(channel_id: ChannelId, encrypted: bool, control: bool, data: Vec<u8>) -> Self {
        Self {
            header: FrameHeader {
                channel_id,
                frame: FrameHeaderContents::new(encrypted, FrameHeaderType::Single, control),
            },
            data,
        }
    }

    /// Build the raw bytes for this frame, ready to write to the wire.
    /// If `tls` is Some, the payload is encrypted first.
    ///
    /// # Panics
    /// Panics when the header's encryption bit is set but `tls` is `None`.
    pub fn to_wire_bytes(&self, tls: Option<&mut TlsState>) -> Vec<u8> {
        let mut wire = Vec::new();
        self.header.write_to(&mut wire);
        if self.header.frame.get_encryption() {
            let state = tls.expect("Frame requires encryption but no TLS state provided");
            let ciphertext = state.encrypt(&self.data);
            wire.extend_from_slice(&(ciphertext.len() as u16).to_be_bytes());
            wire.extend_from_slice(&ciphertext);
        } else {
            wire.extend_from_slice(&(self.data.len() as u16).to_be_bytes());
            wire.extend_from_slice(&self.data);
        }
        wire
    }

    /// Create a simple single-frame (encrypted, channel-specific).
    pub fn new_encrypted(channel_id: ChannelId, data: Vec<u8>) -> Self {
        Self::single_frame(channel_id, true, false, data)
    }

    /// Create a simple single-frame (unencrypted, control channel 0).
    pub fn new_control_unencrypted(data: Vec<u8>) -> Self {
        Self::single_frame(0, false, false, data)
    }

    /// Create a simple single-frame (encrypted, control channel 0).
    pub fn new_control_encrypted(data: Vec<u8>) -> Self {
        Self::single_frame(0, true, false, data)
    }

    /// Create a common (control-bit set) encrypted frame for a specific channel.
    pub fn new_common_response(channel_id: ChannelId, data: Vec<u8>) -> Self {
        Self::single_frame(channel_id, true, true, data)
    }
}
// ---------------------------------------------------------------------------
// Frame reader — synchronous, works over any std::io::Read
// ---------------------------------------------------------------------------
/// Accumulates multi-frame packets into complete frames.
pub struct FrameReader {
    // Decrypted fragments of the in-progress multi-frame packet, in arrival
    // order; drained when the Last fragment arrives.
    rx_sofar: Vec<Vec<u8>>,
}
impl FrameReader {
    /// Create a reader with no partial packet buffered.
    pub fn new() -> Self {
        Self {
            rx_sofar: Vec::new(),
        }
    }
    /// Read one complete frame from the stream.
    /// Handles multi-frame reassembly transparently.
    /// The `tls` state is used to decrypt encrypted frames.
    ///
    /// Loops until a Single frame or a Last fragment completes a packet.
    ///
    /// # Errors
    /// Fails on stream I/O errors or TLS decryption failure.
    pub fn read_frame<R: Read>(
        &mut self,
        stream: &mut R,
        tls: &mut TlsState,
    ) -> Result<Frame> {
        loop {
            // Read header (2 bytes): channel id, then flags byte.
            let mut hdr = [0u8; 2];
            stream.read_exact(&mut hdr).context("reading frame header")?;
            let channel_id = hdr[0];
            // Build a zeroed flags value, then overwrite with the wire byte.
            let mut flags = FrameHeaderContents::new(false, FrameHeaderType::Single, false);
            flags.0 = hdr[1];
            let header = FrameHeader {
                channel_id,
                frame: flags,
            };
            // Read length
            let frame_type = header.frame.get_frame_type();
            let len = if frame_type == FrameHeaderType::First {
                // First frame of multi-frame has 6-byte header (2 len + 4 total len).
                // Only the 2-byte fragment length is used; the 4-byte total
                // packet length is read and discarded.
                let mut p = [0u8; 6];
                stream.read_exact(&mut p).context("reading first-frame length")?;
                u16::from_be_bytes([p[0], p[1]])
            } else {
                let mut p = [0u8; 2];
                stream.read_exact(&mut p).context("reading frame length")?;
                u16::from_be_bytes(p)
            };
            // Read payload
            let mut payload = vec![0u8; len as usize];
            stream.read_exact(&mut payload).context("reading frame payload")?;
            // Decrypt if needed — each fragment is decrypted individually.
            let plaintext = if header.frame.get_encryption() {
                tls.decrypt(&payload)?
            } else {
                payload
            };
            // Reassemble multi-frame packets
            match frame_type {
                FrameHeaderType::Single => {
                    return Ok(Frame {
                        header,
                        data: plaintext,
                    });
                }
                FrameHeaderType::First => {
                    // New packet: discard any stale partial reassembly.
                    self.rx_sofar.clear();
                    self.rx_sofar.push(plaintext);
                }
                FrameHeaderType::Middle => {
                    self.rx_sofar.push(plaintext);
                }
                FrameHeaderType::Last => {
                    self.rx_sofar.push(plaintext);
                    // The returned header is the Last fragment's header.
                    let data: Vec<u8> = self.rx_sofar.drain(..).flatten().collect();
                    return Ok(Frame { header, data });
                }
            }
        }
    }
}
// ---------------------------------------------------------------------------
// Frame writer — synchronous
// ---------------------------------------------------------------------------
/// Serialize `frame` (encrypting via `tls` when its header requires it),
/// write it to `stream`, and flush so the peer sees it immediately.
pub fn write_frame<W: Write>(
    stream: &mut W,
    frame: &Frame,
    tls: &mut TlsState,
) -> Result<()> {
    let wire = frame.to_wire_bytes(Some(tls));
    stream.write_all(&wire).context("writing frame")?;
    stream.flush().context("flushing frame")
}
// ---------------------------------------------------------------------------
// TLS state — wraps ESP-IDF mbedtls for Android Auto encryption
// ---------------------------------------------------------------------------
//
// Android Auto tunnels TLS records inside its own frame protocol.
// The head unit acts as the TLS **client**; the phone is the TLS server.
// We use mbedtls with buffer-based I/O (no socket) so we can manually
// feed/drain TLS records to/from the AA frame layer.
//
// Key protocol details (from upstream crate analysis):
// - TLS 1.2 with ECDHE_RSA_AES_GCM ciphers
// - Mutual TLS: head unit presents client cert from cert.rs
// - Server cert verification is DISABLED (phone cert accepted blindly)
// - Certs are X.509 v1 (Google Automotive Link CA, 2014)
use std::ffi::c_void;
/// Buffer-based I/O context for mbedtls send/recv callbacks.
///
/// mbedtls calls our `bio_send` / `bio_recv` callbacks with a pointer
/// to this struct. Outgoing TLS records accumulate in `tx`; incoming
/// records are supplied through `rx` and consumed from `rx_pos` onward.
struct BioBuf {
    /// Incoming data (phone → mbedtls), read via `bio_recv`.
    rx: Vec<u8>,
    /// Read cursor into `rx`; bytes before it are already consumed.
    rx_pos: usize,
    /// Outgoing data (mbedtls → phone), drained via `drain_tx`.
    tx: Vec<u8>,
}

impl BioBuf {
    /// Fresh buffers sized for a typical handshake flight.
    fn new() -> Self {
        BioBuf {
            rx: Vec::with_capacity(8192),
            rx_pos: 0,
            tx: Vec::with_capacity(8192),
        }
    }

    /// Queue incoming bytes for mbedtls to read via `bio_recv`.
    fn feed(&mut self, data: &[u8]) {
        // Reset once everything queued so far has been consumed, so `rx`
        // doesn't grow without bound across handshake flights.
        let fully_consumed = self.rx_pos > 0 && self.rx_pos == self.rx.len();
        if fully_consumed {
            self.rx.clear();
            self.rx_pos = 0;
        }
        self.rx.extend_from_slice(data);
    }

    /// Take all bytes mbedtls produced via `bio_send`, leaving `tx` empty.
    fn drain_tx(&mut self) -> Vec<u8> {
        std::mem::replace(&mut self.tx, Vec::new())
    }

    /// Bytes queued in `rx` that mbedtls has not read yet.
    fn rx_available(&self) -> usize {
        self.rx.len() - self.rx_pos
    }
}
/// mbedtls send callback: stores outgoing TLS records in BioBuf.tx
unsafe extern "C" fn bio_send(
    ctx: *mut c_void,
    buf: *const u8,
    len: usize,
) -> i32 {
    // SAFETY: ctx is the BioBuf pointer registered via mbedtls_ssl_set_bio,
    // and buf/len describe a valid record supplied by mbedtls.
    let bio = &mut *(ctx as *mut BioBuf);
    bio.tx.extend_from_slice(std::slice::from_raw_parts(buf, len));
    len as i32
}
/// mbedtls recv callback: reads incoming TLS records from BioBuf.rx
unsafe extern "C" fn bio_recv(
    ctx: *mut c_void,
    buf: *mut u8,
    len: usize,
) -> i32 {
    // SAFETY: ctx is the BioBuf pointer registered via mbedtls_ssl_set_bio.
    let bio = &mut *(ctx as *mut BioBuf);
    let pending = bio.rx_available();
    if pending == 0 {
        // Nothing buffered yet — tell mbedtls to come back after more data
        // has been fed from the phone.
        return esp_idf_sys::MBEDTLS_ERR_SSL_WANT_READ;
    }
    let n = pending.min(len);
    std::ptr::copy_nonoverlapping(bio.rx.as_ptr().add(bio.rx_pos), buf, n);
    bio.rx_pos += n;
    n as i32
}
/// Wraps TLS encryption/decryption state using ESP-IDF's mbedtls.
///
/// Uses buffer-based I/O so TLS records can be tunneled inside
/// Android Auto frames rather than operating on a raw socket.
pub struct TlsState {
    /// Whether the TLS handshake has completed successfully.
    pub handshake_complete: bool,
    /// The mbedtls state, initialized lazily on first handshake.
    /// `None` until `start_handshake` is called.
    inner: Option<TlsInner>,
}
/// Heap-allocated mbedtls contexts. Each is individually boxed to ensure
/// stable addresses (mbedtls stores internal self-referential pointers).
///
/// All pointers originate from `Box::into_raw` in `TlsInner::new` and are
/// released exactly once in `Drop`.
struct TlsInner {
    ssl: *mut esp_idf_sys::mbedtls_ssl_context,
    conf: *mut esp_idf_sys::mbedtls_ssl_config,
    cacert: *mut esp_idf_sys::mbedtls_x509_crt,
    clicert: *mut esp_idf_sys::mbedtls_x509_crt,
    pkey: *mut esp_idf_sys::mbedtls_pk_context,
    ctr_drbg: *mut esp_idf_sys::mbedtls_ctr_drbg_context,
    entropy: *mut esp_idf_sys::mbedtls_entropy_context,
    bio: *mut BioBuf,
}
impl TlsInner {
    /// Initialize all mbedtls contexts, load certs, configure as TLS client.
    ///
    /// # Errors
    /// Fails if RNG seeding, client cert/key parsing, or any mbedtls
    /// configuration call returns nonzero.
    ///
    /// NOTE(review): on the `bail!` error paths below, already-allocated
    /// boxes and initialized mbedtls contexts are leaked (no cleanup before
    /// returning). Acceptable only if init failure is fatal — confirm.
    fn new() -> Result<Self> {
        use crate::cert;
        unsafe {
            // Allocate all contexts individually on the heap
            // (stable addresses; see TlsInner docs).
            let ssl = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_ssl_context>()));
            let conf = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_ssl_config>()));
            let cacert = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_x509_crt>()));
            let clicert = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_x509_crt>()));
            let pkey = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_pk_context>()));
            let ctr_drbg = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_ctr_drbg_context>()));
            let entropy = Box::into_raw(Box::new(std::mem::zeroed::<esp_idf_sys::mbedtls_entropy_context>()));
            let bio = Box::into_raw(Box::new(BioBuf::new()));
            // Initialize all contexts
            esp_idf_sys::mbedtls_ssl_init(ssl);
            esp_idf_sys::mbedtls_ssl_config_init(conf);
            esp_idf_sys::mbedtls_x509_crt_init(cacert);
            esp_idf_sys::mbedtls_x509_crt_init(clicert);
            esp_idf_sys::mbedtls_pk_init(pkey);
            esp_idf_sys::mbedtls_ctr_drbg_init(ctr_drbg);
            esp_idf_sys::mbedtls_entropy_init(entropy);
            // Seed the random number generator
            let pers = b"android_auto_tls";
            let ret = esp_idf_sys::mbedtls_ctr_drbg_seed(
                ctr_drbg,
                Some(esp_idf_sys::mbedtls_entropy_func),
                entropy as *mut c_void,
                pers.as_ptr(),
                pers.len(),
            );
            if ret != 0 {
                bail!("mbedtls_ctr_drbg_seed failed: -0x{:04X}", -ret);
            }
            // Load CA certificate (Google Automotive Link root)
            // mbedtls_x509_crt_parse expects null-terminated PEM
            let mut ca_pem = cert::AAUTO_CERT.to_vec();
            ca_pem.push(0);
            let ret = esp_idf_sys::mbedtls_x509_crt_parse(
                cacert,
                ca_pem.as_ptr(),
                ca_pem.len(),
            );
            if ret != 0 {
                // Non-fatal: server verification is disabled below anyway.
                log::warn!("CA cert parse returned {} (may be OK for v1 certs)", ret);
            }
            // Load client certificate
            let mut cli_pem = cert::CERTIFICATE.to_vec();
            cli_pem.push(0);
            let ret = esp_idf_sys::mbedtls_x509_crt_parse(
                clicert,
                cli_pem.as_ptr(),
                cli_pem.len(),
            );
            if ret != 0 {
                bail!("Client cert parse failed: -0x{:04X}", -ret);
            }
            // Load client private key
            let mut key_pem = cert::PRIVATE_KEY.to_vec();
            key_pem.push(0);
            let ret = esp_idf_sys::mbedtls_pk_parse_key(
                pkey,
                key_pem.as_ptr(),
                key_pem.len(),
                std::ptr::null(), // no password
                0,
                Some(esp_idf_sys::mbedtls_ctr_drbg_random),
                ctr_drbg as *mut c_void,
            );
            if ret != 0 {
                bail!("Private key parse failed: -0x{:04X}", -ret);
            }
            // Configure as TLS client, stream transport, default preset
            let ret = esp_idf_sys::mbedtls_ssl_config_defaults(
                conf,
                esp_idf_sys::MBEDTLS_SSL_IS_CLIENT as i32,
                esp_idf_sys::MBEDTLS_SSL_TRANSPORT_STREAM as i32,
                esp_idf_sys::MBEDTLS_SSL_PRESET_DEFAULT as i32,
            );
            if ret != 0 {
                bail!("mbedtls_ssl_config_defaults failed: -0x{:04X}", -ret);
            }
            // Force TLS 1.2 only (no TLS 1.3) — matches aa-proxy-rs behavior
            // mbedtls_ssl_conf_max/min_tls_version are static inline, so set fields directly
            (*conf).private_max_tls_version = esp_idf_sys::mbedtls_ssl_protocol_version_MBEDTLS_SSL_VERSION_TLS1_2;
            (*conf).private_min_tls_version = esp_idf_sys::mbedtls_ssl_protocol_version_MBEDTLS_SSL_VERSION_TLS1_2;
            // Skip server certificate verification (matches upstream behavior)
            esp_idf_sys::mbedtls_ssl_conf_authmode(
                conf,
                esp_idf_sys::MBEDTLS_SSL_VERIFY_NONE as i32,
            );
            // Set CA chain and client certificate
            esp_idf_sys::mbedtls_ssl_conf_ca_chain(conf, cacert, std::ptr::null_mut());
            let ret = esp_idf_sys::mbedtls_ssl_conf_own_cert(conf, clicert, pkey);
            if ret != 0 {
                bail!("mbedtls_ssl_conf_own_cert failed: -0x{:04X}", -ret);
            }
            // Set RNG
            esp_idf_sys::mbedtls_ssl_conf_rng(
                conf,
                Some(esp_idf_sys::mbedtls_ctr_drbg_random),
                ctr_drbg as *mut c_void,
            );
            // Apply config to SSL context
            let ret = esp_idf_sys::mbedtls_ssl_setup(ssl, conf);
            if ret != 0 {
                bail!("mbedtls_ssl_setup failed: -0x{:04X}", -ret);
            }
            // Set hostname (dummy — server cert not verified anyway)
            let hostname = std::ffi::CString::new("androidauto").unwrap();
            esp_idf_sys::mbedtls_ssl_set_hostname(ssl, hostname.as_ptr());
            // Set buffer-based I/O callbacks
            // (records are tunneled inside AA frames, not a socket).
            esp_idf_sys::mbedtls_ssl_set_bio(
                ssl,
                bio as *mut c_void,
                Some(bio_send),
                Some(bio_recv),
                None, // no recv_timeout
            );
            log::info!("mbedtls TLS client initialized (certs loaded, VERIFY_NONE)");
            Ok(TlsInner {
                ssl,
                conf,
                cacert,
                clicert,
                pkey,
                ctr_drbg,
                entropy,
                bio,
            })
        }
    }
    /// Run handshake steps until mbedtls needs more data or completes.
    /// Returns (output_bytes, is_complete).
    ///
    /// `output_bytes` is whatever mbedtls wrote into the BIO tx buffer and
    /// must be forwarded to the phone inside an SslHandshake message.
    ///
    /// # Errors
    /// Fails if any step returns an error other than WANT_READ.
    fn run_handshake_steps(&mut self) -> Result<(Vec<u8>, bool)> {
        unsafe {
            let mut step_count = 0u32;
            loop {
                let state_before = (*self.ssl).private_state;
                let ret = esp_idf_sys::mbedtls_ssl_handshake_step(self.ssl);
                let state_after = (*self.ssl).private_state;
                step_count += 1;
                log::debug!("TLS handshake step {}: state {} -> {}, ret={}", step_count, state_before, state_after, ret);
                if ret == 0 {
                    // Check if handshake is fully complete
                    if state_after == esp_idf_sys::mbedtls_ssl_states_MBEDTLS_SSL_HANDSHAKE_OVER as i32 {
                        let output = (*self.bio).drain_tx();
                        log::info!("TLS: handshake OVER after {} steps ({} bytes output)", step_count, output.len());
                        return Ok((output, true));
                    }
                    // Step succeeded but handshake not done — continue
                    continue;
                } else if ret == esp_idf_sys::MBEDTLS_ERR_SSL_WANT_READ {
                    // mbedtls needs more data from the phone
                    let output = (*self.bio).drain_tx();
                    log::info!("TLS: WANT_READ after {} steps, state={}, {} bytes output", step_count, state_after, output.len());
                    return Ok((output, false));
                } else {
                    log::error!("TLS handshake step failed at state {}: -0x{:04X}", state_after, -ret);
                    bail!("mbedtls_ssl_handshake_step failed: -0x{:04X}", -ret);
                }
            }
        }
    }
}
impl Drop for TlsInner {
    /// Tear down every mbedtls context, then release the backing boxes.
    fn drop(&mut self) {
        // SAFETY: every pointer came from Box::into_raw in TlsInner::new and
        // is freed exactly once here. The mbedtls_*_free calls must run
        // before the boxes are dropped, since they touch the pointed-to state.
        unsafe {
            esp_idf_sys::mbedtls_ssl_free(self.ssl);
            esp_idf_sys::mbedtls_ssl_config_free(self.conf);
            esp_idf_sys::mbedtls_x509_crt_free(self.cacert);
            esp_idf_sys::mbedtls_x509_crt_free(self.clicert);
            esp_idf_sys::mbedtls_pk_free(self.pkey);
            esp_idf_sys::mbedtls_ctr_drbg_free(self.ctr_drbg);
            esp_idf_sys::mbedtls_entropy_free(self.entropy);
            // Reconstruct the boxes so their heap memory is released.
            drop(Box::from_raw(self.ssl));
            drop(Box::from_raw(self.conf));
            drop(Box::from_raw(self.cacert));
            drop(Box::from_raw(self.clicert));
            drop(Box::from_raw(self.pkey));
            drop(Box::from_raw(self.ctr_drbg));
            drop(Box::from_raw(self.entropy));
            drop(Box::from_raw(self.bio));
        }
        log::info!("mbedtls TLS context freed");
    }
}
impl TlsState {
    /// Create a new TLS state (not yet connected).
    pub fn new() -> Self {
        Self {
            handshake_complete: false,
            inner: None,
        }
    }
    /// Start the TLS handshake. Initializes mbedtls and generates the
    /// ClientHello. Returns the TLS records to send to the phone inside
    /// an SslHandshake control message.
    pub fn start_handshake(&mut self) -> Result<Option<Vec<u8>>> {
        log::info!("TLS: starting handshake (mbedtls client)");
        let inner = TlsInner::new()?;
        self.inner = Some(inner);
        // Re-borrow through self so the inner context lives in self.inner.
        let inner = self.inner.as_mut().unwrap();
        let (output, complete) = inner.run_handshake_steps()?;
        if complete {
            self.handshake_complete = true;
            log::info!("TLS: handshake completed in one step (unlikely but handled)");
        }
        if output.is_empty() {
            Ok(None)
        } else {
            log::info!("TLS: ClientHello generated ({} bytes)", output.len());
            Ok(Some(output))
        }
    }
    /// Process TLS handshake data received from the phone.
    /// Returns response TLS records to send back (if any).
    pub fn process_handshake(&mut self, incoming: &[u8]) -> Result<Option<Vec<u8>>> {
        let inner = self.inner.as_mut()
            .ok_or_else(|| anyhow::anyhow!("TLS not initialized — call start_handshake first"))?;
        // Feed incoming TLS records to mbedtls
        unsafe { (*inner.bio).feed(incoming); }
        let (output, complete) = inner.run_handshake_steps()?;
        if complete {
            self.handshake_complete = true;
            log::info!("TLS: handshake complete — encryption active");
        }
        if output.is_empty() {
            Ok(None)
        } else {
            log::info!("TLS: handshake response ({} bytes)", output.len());
            Ok(Some(output))
        }
    }
    /// Encrypt plaintext data for sending to the phone.
    /// Returns TLS records (ciphertext) to be sent in an encrypted AA frame.
    ///
    /// NOTE(review): on any failure (not initialized, handshake incomplete,
    /// write error) this deliberately falls back to returning the PLAINTEXT —
    /// the caller would then put unencrypted bytes on the wire. Confirm this
    /// best-effort behavior is intended rather than aborting the session.
    pub fn encrypt(&mut self, plaintext: &[u8]) -> Vec<u8> {
        let inner = match self.inner.as_mut() {
            Some(i) => i,
            None => {
                log::warn!("TLS encrypt called but not initialized — passing through");
                return plaintext.to_vec();
            }
        };
        if !self.handshake_complete {
            log::warn!("TLS encrypt called before handshake complete — passing through");
            return plaintext.to_vec();
        }
        unsafe {
            // NOTE(review): mbedtls_ssl_write may consume fewer than
            // plaintext.len() bytes (partial write); the return value is only
            // checked for errors here — TODO confirm partial writes cannot
            // occur with this memory BIO.
            let ret = esp_idf_sys::mbedtls_ssl_write(
                inner.ssl,
                plaintext.as_ptr(),
                plaintext.len(),
            );
            if ret < 0 {
                log::error!("mbedtls_ssl_write failed: -0x{:04X}", -ret);
                return plaintext.to_vec(); // fallback: pass through
            }
            // The produced TLS records were captured by the BIO send callback.
            (*inner.bio).drain_tx()
        }
    }
    /// Decrypt ciphertext received from the phone.
    /// Feed TLS records and return the decrypted plaintext.
    pub fn decrypt(&mut self, ciphertext: &[u8]) -> Result<Vec<u8>> {
        let inner = match self.inner.as_mut() {
            Some(i) => i,
            None => {
                log::warn!("TLS decrypt called but not initialized — passing through");
                return Ok(ciphertext.to_vec());
            }
        };
        if !self.handshake_complete {
            log::warn!("TLS decrypt called before handshake complete — passing through");
            return Ok(ciphertext.to_vec());
        }
        unsafe {
            // Feed ciphertext to mbedtls
            (*inner.bio).feed(ciphertext);
            // Read decrypted plaintext. Plaintext is never larger than the
            // ciphertext; the +1024 is headroom. NOTE(review): a single
            // ssl_read call may not drain multiple buffered records — TODO
            // confirm the frame layer feeds one record at a time.
            let mut plaintext = vec![0u8; ciphertext.len() + 1024]; // generous buffer
            let ret = esp_idf_sys::mbedtls_ssl_read(
                inner.ssl,
                plaintext.as_mut_ptr(),
                plaintext.len(),
            );
            if ret == esp_idf_sys::MBEDTLS_ERR_SSL_WANT_READ {
                // Not enough data for a complete TLS record yet
                return Ok(Vec::new());
            }
            if ret < 0 {
                bail!("mbedtls_ssl_read failed: -0x{:04X}", -ret);
            }
            // ret >= 0 is the number of plaintext bytes produced.
            plaintext.truncate(ret as usize);
            Ok(plaintext)
        }
    }
}
+402
View File
@@ -0,0 +1,402 @@
//! ESP32 Android Auto Navigation Head Unit
//!
//! A navigation-focused Android Auto head unit for ESP32-S3 (WT32-SC01 Plus).
//!
//! Architecture:
//! 1. ESP32 starts WiFi AP (phone joins our network)
//! 2. Phone connects via TCP to our IP on port 5277
//! 3. TLS + protobuf protocol runs over TCP
//! 4. Navigation events are received and displayed on screen
//! 5. Video frames are decoded (esp_h264) and displayed
//!
//! Note: ESP32-S3 does NOT support Bluetooth Classic. The standard Android
//! Auto wireless pairing via BT SPP is not available. The phone must join
//! the ESP32's WiFi AP manually and connect directly.
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
use anyhow::{Context, Result};
use esp_idf_hal::peripherals::Peripherals;
use esp_idf_svc::eventloop::EspSystemEventLoop;
use esp_idf_svc::nvs::EspDefaultNvsPartition;
use esp_idf_svc::wifi::{
AccessPointConfiguration, AuthMethod, BlockingWifi, Configuration, EspWifi,
};
mod bluetooth;
mod cert;
mod channels;
mod common;
mod config;
mod control;
mod decoder;
mod display;
mod frame;
mod mdns;
mod navigation;
mod session;
mod touch;
/// Protobuf-generated modules.
#[allow(missing_docs, clippy::missing_docs_in_private_items)]
mod proto {
include!(concat!(env!("OUT_DIR"), "/protobuf/mod.rs"));
}
/// Firmware entry point: bring up hardware, networking and worker threads,
/// then loop forever attempting Android Auto connection cycles.
fn main() -> Result<()> {
    // Initialize ESP-IDF system
    esp_idf_svc::sys::link_patches();
    esp_idf_svc::log::EspLogger::initialize_default();
    log::info!("=== ESP32 Android Auto Navigation Head Unit ===");
    // Check PSRAM availability — video decoding needs large buffers.
    let free_psram = unsafe {
        esp_idf_sys::heap_caps_get_free_size(esp_idf_sys::MALLOC_CAP_SPIRAM)
    };
    log::info!("PSRAM available: {} KB", free_psram / 1024);
    if free_psram < 1024 * 1024 {
        // Logged but not fatal — boot continues in a degraded state.
        log::error!("PSRAM too small or not detected! Need >= 1MB, got {} KB", free_psram / 1024);
    }
    // Take hardware peripherals (once, at startup)
    let peripherals = Peripherals::take().context("taking peripherals")?;
    // Initialize display (consumes reset + backlight pins)
    let lcd = display::Display::init(
        peripherals.pins.gpio4,
        peripherals.pins.gpio45,
    ).context("initializing display")?;
    lcd.fill_color(0x0000); // Black screen
    log::info!("Display ready — black screen");
    // Initialize touch controller (I2C0, SDA=GPIO6, SCL=GPIO5)
    let touch_driver = touch::Touch::new(
        peripherals.i2c0,
        peripherals.pins.gpio6,
        peripherals.pins.gpio5,
    ).context("initializing touch")?;
    let sys_loop = EspSystemEventLoop::take().context("taking event loop")?;
    let nvs = EspDefaultNvsPartition::take().context("taking NVS partition")?;
    let hu_config = config::HeadUnitConfig::default();
    let wifi_config = config::WifiConfig::default();
    // Start WiFi AP — persistent for entire runtime
    let wifi = init_wifi_ap(peripherals.modem, sys_loop, nvs, &wifi_config)?;
    // Read the actual AP IP from the network interface
    let local_ip = wifi
        .wifi()
        .ap_netif()
        .get_ip_info()
        .map(|info| info.ip.to_string())
        .unwrap_or_else(|_| "192.168.4.1".into());
    // Read MAC address from the WiFi driver
    let mac = get_ap_mac(&wifi);
    log::info!(
        "WiFi AP ready: SSID='{}', IP={}, MAC={}",
        wifi_config.ssid,
        local_ip,
        mac
    );
    // Note: ESP32-S3 does not support BT Classic — skip BT pairing
    log::warn!("ESP32-S3: No Bluetooth Classic — phone must join WiFi AP manually");
    // Advertise via mDNS so companion apps can discover us.
    // The handle must stay alive — dropping it stops the advertisement.
    let _mdns = mdns::start_mdns_service(
        "esp32-aa-hu",
        "ESP32 Android Auto",
        wifi_config.listen_port,
        &mac,
    );
    match &_mdns {
        Ok(_) => log::info!("mDNS service registered"),
        Err(e) => log::warn!("mDNS failed (non-fatal): {:?}", e),
    }
    // Channel for navigation events → UI thread
    let (nav_tx, nav_rx) = mpsc::channel::<navigation::NavEvent>();
    // Channel for decoded video frames → display thread
    let (video_tx, video_rx) = mpsc::channel::<session::VideoFrame>();
    // Spawn navigation UI thread
    let _ui_thread = thread::Builder::new()
        .name("nav-ui".into())
        .stack_size(8192)
        .spawn(move || {
            nav_ui_loop(nav_rx);
        })
        .context("spawning UI thread")?;
    // Spawn video display thread — owns the LCD
    let _video_thread = thread::Builder::new()
        .name("video-display".into())
        .stack_size(8192)
        .spawn(move || {
            video_display_loop(video_rx, lcd);
        })
        .context("spawning video display thread")?;
    // Spawn touch polling thread — sends touch coords to AA session.
    // NOTE: _touch_rx is currently unconsumed; touch events are discarded.
    let (touch_tx, _touch_rx) = mpsc::channel::<touch::TouchEvent>();
    let _touch_thread = thread::Builder::new()
        .name("touch-poll".into())
        .stack_size(4096)
        .spawn(move || {
            touch_poll_loop(touch_driver, touch_tx);
        })
        .context("spawning touch thread")?;
    // Main loop: dual-mode connection
    // 1. Listen for incoming TCP connections (phone connects to us)
    // 2. Also try connecting to DHCP clients on port 5277 (phone's "head unit server" mode)
    loop {
        if let Err(e) = run_connection_cycle(
            &hu_config,
            &wifi_config,
            &local_ip,
            &mac,
            &nav_tx,
            &video_tx,
        ) {
            log::error!("Connection cycle failed: {:?}", e);
            log::info!("Restarting in 5 seconds...");
            thread::sleep(Duration::from_secs(5));
        }
    }
}
/// Initialize WiFi in Access Point mode.
///
/// The ESP32 hosts the WiFi network. The phone joins it and connects
/// via TCP to port 5277.
fn init_wifi_ap(
    modem: esp_idf_hal::modem::Modem<'static>,
    sys_loop: EspSystemEventLoop,
    nvs: EspDefaultNvsPartition,
    config: &config::WifiConfig,
) -> Result<BlockingWifi<EspWifi<'static>>> {
    log::info!("Starting WiFi AP: SSID='{}'", config.ssid);
    // Convert SSID/password into the fixed-capacity strings the driver
    // expects; both conversions fail if the value is too long.
    let ssid = config
        .ssid
        .as_str()
        .try_into()
        .map_err(|_| anyhow::anyhow!("SSID too long"))?;
    let password = config
        .password
        .as_str()
        .try_into()
        .map_err(|_| anyhow::anyhow!("Password too long"))?;
    // Wrap the raw driver in the blocking facade so start() waits until
    // the AP is actually up.
    let driver = EspWifi::new(modem, sys_loop.clone(), Some(nvs))
        .context("creating EspWifi")?;
    let mut ap = BlockingWifi::wrap(driver, sys_loop)
        .context("wrapping BlockingWifi")?;
    ap.set_configuration(&Configuration::AccessPoint(AccessPointConfiguration {
        ssid,
        password,
        auth_method: AuthMethod::WPA2Personal,
        // Exactly one phone at a time.
        max_connections: 1,
        ..Default::default()
    }))
    .context("setting WiFi AP config")?;
    ap.start().context("starting WiFi AP")?;
    log::info!("WiFi AP started successfully");
    Ok(ap)
}
/// Read the AP interface MAC address as a formatted string.
fn get_ap_mac(wifi: &BlockingWifi<EspWifi<'static>>) -> String {
    // Log the AP IP info as a side effect (diagnostic only).
    if let Ok(info) = wifi.wifi().ap_netif().get_ip_info() {
        log::info!("AP IP info: {:?}", info);
    }
    // Query the MAC via the raw ESP-IDF API.
    let mut octets = [0u8; 6];
    let err = unsafe {
        esp_idf_sys::esp_wifi_get_mac(
            esp_idf_sys::wifi_interface_t_WIFI_IF_AP,
            octets.as_mut_ptr(),
        )
    };
    // Guard clause: on failure return an all-zero placeholder.
    if err != esp_idf_sys::ESP_OK as i32 {
        log::warn!("Failed to read AP MAC address (err={})", err);
        return "00:00:00:00:00:00".into();
    }
    format!(
        "{:02X}:{:02X}:{:02X}:{:02X}:{:02X}:{:02X}",
        octets[0], octets[1], octets[2], octets[3], octets[4], octets[5]
    )
}
/// One full connection cycle:
/// - Listen for incoming TCP on port 5277 (phone connects to us)
/// - Simultaneously try connecting to DHCP clients on port 5277
///   (phone's "Start head unit server" developer mode)
///
/// Returns Ok(()) when a session ran and ended cleanly; errors propagate
/// to main(), which restarts the cycle after a delay.
fn run_connection_cycle(
    hu_config: &config::HeadUnitConfig,
    wifi_config: &config::WifiConfig,
    local_ip: &str,
    mac: &str,
    nav_tx: &mpsc::Sender<navigation::NavEvent>,
    video_tx: &mpsc::Sender<session::VideoFrame>,
) -> Result<()> {
    log::info!(
        "Waiting for Android Auto connection (listen on :{} + scanning clients)...",
        wifi_config.listen_port
    );
    // Set up non-blocking listener
    let listener = std::net::TcpListener::bind(format!("0.0.0.0:{}", wifi_config.listen_port))
        .context("binding TCP listener")?;
    listener.set_nonblocking(true)?;
    // Scan DHCP range for phones with "head unit server" running.
    // WT32-SC01 Plus DHCP range is typically .2-.15.
    // Pre-parse the candidate addresses once; skip (with a warning) any
    // that fail to parse instead of panicking at connect time.
    let client_addrs: Vec<std::net::SocketAddr> = (2..=15)
        .filter_map(|i| {
            let base = local_ip.rsplit_once('.').map(|(b, _)| b).unwrap_or("192.168.71");
            let addr = format!("{}.{}:{}", base, i, wifi_config.listen_port);
            match addr.parse() {
                Ok(sock) => Some(sock),
                Err(e) => {
                    log::warn!("Skipping unparsable scan address '{}': {}", addr, e);
                    None
                }
            }
        })
        .collect();
    loop {
        // Check for incoming connections (non-blocking)
        match listener.accept() {
            Ok((mut tcp_stream, peer_addr)) => {
                log::info!("Phone connected to us from {}", peer_addr);
                tcp_stream.set_nodelay(true)?;
                // Switch back to blocking mode for the protocol session.
                tcp_stream.set_nonblocking(false)?;
                log::info!("Starting Android Auto protocol session...");
                session::run_session(&mut tcp_stream, hu_config, nav_tx, video_tx)?;
                log::info!("Session ended cleanly");
                return Ok(());
            }
            Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => {
                // No incoming connection yet — try outbound
            }
            Err(e) => return Err(e).context("accepting TCP connection"),
        }
        // Try connecting to each potential client IP
        for addr in &client_addrs {
            if let Ok(mut tcp_stream) = std::net::TcpStream::connect_timeout(
                addr,
                Duration::from_millis(100),
            ) {
                log::info!("Connected to phone's head unit server at {}", addr);
                tcp_stream.set_nodelay(true)?;
                log::info!("Starting Android Auto protocol session...");
                session::run_session(&mut tcp_stream, hu_config, nav_tx, video_tx)?;
                log::info!("Session ended cleanly");
                return Ok(());
            }
        }
        // Brief pause before next scan
        thread::sleep(Duration::from_millis(500));
    }
}
/// Navigation UI loop — receives NavEvents and updates the display.
fn nav_ui_loop(nav_rx: mpsc::Receiver<navigation::NavEvent>) {
    log::info!("Navigation UI thread started");
    // Iterating the receiver blocks on each recv() and terminates once
    // every sender has been dropped.
    for event in nav_rx {
        match &event {
            navigation::NavEvent::StatusChanged(status) => {
                log::info!("🧭 Nav status: {:?}", status);
            }
            navigation::NavEvent::TurnInstruction(turn) => {
                log::info!(
                    "🔄 Turn: {} — {:?} {:?} (image: {} bytes)",
                    turn.street_name,
                    turn.direction,
                    turn.maneuver,
                    turn.turn_image.len()
                );
                // TODO: Render turn image + text on Slint UI
            }
            navigation::NavEvent::DistanceUpdate(dist) => {
                log::info!(
                    "📏 Distance: {}m, {} {:?}, ETA: {}s",
                    dist.meters,
                    dist.distance_to_step_millis,
                    dist.unit,
                    dist.time_to_step_seconds
                );
                // TODO: Update distance display on Slint UI
            }
        }
    }
    log::info!("Nav event channel closed, UI thread exiting");
}
/// Video display loop — receives decoded RGB565 frames and sends them to the LCD.
fn video_display_loop(video_rx: mpsc::Receiver<session::VideoFrame>, lcd: display::Display) {
    log::info!("Video display thread started");
    let mut frame_count: u64 = 0;
    loop {
        // Use timeout so this thread doesn't block forever when no frames arrive
        match video_rx.recv_timeout(Duration::from_secs(10)) {
            Ok(frame) => {
                frame_count += 1;
                // Log every frame for now (test pattern is infrequent)
                log::info!(
                    "🖥️ Display frame #{} ({} pixels)",
                    frame_count,
                    frame.pixels.len()
                );
                // Send RGB565 framebuffer to display.
                // SAFETY: reinterprets the Vec<u16> pixel buffer as a byte
                // slice of twice the length. The pointer is valid for the
                // borrow's duration and u16 has no invalid bit patterns;
                // byte order follows the native endianness of the target —
                // assumed to match what draw_rgb565 expects (TODO confirm).
                let bytes = unsafe {
                    std::slice::from_raw_parts(
                        frame.pixels.as_ptr() as *const u8,
                        frame.pixels.len() * 2,
                    )
                };
                lcd.draw_rgb565(bytes);
            }
            Err(mpsc::RecvTimeoutError::Timeout) => {
                log::debug!("Video display: no frames for 10s (waiting...)");
            }
            Err(mpsc::RecvTimeoutError::Disconnected) => {
                log::info!("Video frame channel closed, display thread exiting");
                break;
            }
        }
    }
}
/// Touch polling loop — reads FT6336U at ~30Hz and sends events.
fn touch_poll_loop(mut touch: touch::Touch<'static>, tx: mpsc::Sender<touch::TouchEvent>) {
log::info!("Touch polling thread started");
loop {
if let Some(event) = touch.poll() {
if event.pressed {
log::debug!("Touch: ({}, {})", event.x, event.y);
}
let _ = tx.send(event);
}
std::thread::sleep(Duration::from_millis(33)); // ~30Hz
}
}
+64
View File
@@ -0,0 +1,64 @@
//! mDNS service advertisement for Android Auto discovery.
//!
//! Since ESP32-S3 does not support Bluetooth Classic (no BT SPP),
//! we cannot use the standard Android Auto wireless pairing flow
//! (UUID `4de17a00-52cb-11e6-bdf4-0800200c9a66` over RFCOMM).
//!
//! Instead, we advertise an `_androidauto._tcp` mDNS service so that
//! companion apps or modified AA clients on the phone can discover
//! the head unit on the local network.
//!
//! Service details:
//! - Type: `_androidauto._tcp`
//! - Port: 5277 (Android Auto TCP port)
//! - TXT records: device name, version, MAC address
use anyhow::{Context, Result};
use esp_idf_svc::mdns::EspMdns;
/// Advertise the Android Auto head unit via mDNS.
///
/// This registers a `_androidauto._tcp` service on the given port
/// with TXT records describing the head unit.
///
/// The returned `EspMdns` must be kept alive for the duration of
/// the advertisement — dropping it stops mDNS.
pub fn start_mdns_service(
    hostname: &str,
    instance_name: &str,
    port: u16,
    mac: &str,
) -> Result<EspMdns> {
    let mut svc = EspMdns::take().context("initializing mDNS")?;
    svc.set_hostname(hostname)
        .context("setting mDNS hostname")?;
    svc.set_instance_name(instance_name)
        .context("setting mDNS instance name")?;
    // Register _androidauto._tcp on the AA listen port, with TXT records
    // describing this head unit.
    svc.add_service(
        Some(instance_name),
        "_androidauto",
        "_tcp",
        port,
        &[
            ("name", instance_name),
            ("ver", "1.0"),
            ("mac", mac),
            ("proto", "aa-tcp"),
        ],
    )
    .context("adding _androidauto._tcp mDNS service")?;
    log::info!(
        "mDNS: advertising _androidauto._tcp on port {} (hostname={})",
        port,
        hostname
    );
    Ok(svc)
}
+229
View File
@@ -0,0 +1,229 @@
//! Navigation channel handler.
//!
//! Receives turn-by-turn navigation data from the phone:
//! - Turn events (street name, direction, maneuver type, turn image)
//! - Distance events (meters, time, unit)
//! - Navigation status (active, inactive, rerouting)
use protobuf::{Enum, Message};
use crate::common::CommonMessage;
use crate::frame::{ChannelId, Frame};
use crate::proto::Wifi;
// ---------------------------------------------------------------------------
// Navigation data structures (what the UI will consume)
// ---------------------------------------------------------------------------
/// A maneuver direction.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ManeuverDirection {
    /// Unrecognized / unmapped protobuf value.
    Unknown,
    /// Turn to the left.
    Left,
    /// Turn to the right.
    Right,
    /// Direction explicitly unspecified by the phone.
    Unspecified,
}
impl From<Wifi::maneuver_direction::Enum> for ManeuverDirection {
fn from(v: Wifi::maneuver_direction::Enum) -> Self {
match v {
Wifi::maneuver_direction::Enum::LEFT => Self::Left,
Wifi::maneuver_direction::Enum::RIGHT => Self::Right,
Wifi::maneuver_direction::Enum::UNSPECIFIED => Self::Unspecified,
_ => Self::Unknown,
}
}
}
/// A maneuver type.
///
/// Mirrors the maneuver kinds carried by the Android Auto navigation
/// channel protobuf (see the matching `From` impl below).
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ManeuverType {
    /// Unrecognized / unmapped protobuf value.
    Unknown,
    Depart,
    NameChange,
    SlightTurn,
    Turn,
    SharpTurn,
    UTurn,
    OnRamp,
    OffRamp,
    Fork,
    Merge,
    RoundaboutEnter,
    RoundaboutExit,
    RoundaboutEnterAndExit,
    Straight,
    FerryBoat,
    FerryTrain,
    Destination,
}
impl From<Wifi::maneuver_type::Enum> for ManeuverType {
fn from(v: Wifi::maneuver_type::Enum) -> Self {
match v {
Wifi::maneuver_type::Enum::DEPART => Self::Depart,
Wifi::maneuver_type::Enum::NAME_CHANGE => Self::NameChange,
Wifi::maneuver_type::Enum::SLIGHT_TURN => Self::SlightTurn,
Wifi::maneuver_type::Enum::TURN => Self::Turn,
Wifi::maneuver_type::Enum::SHARP_TURN => Self::SharpTurn,
Wifi::maneuver_type::Enum::U_TURN => Self::UTurn,
Wifi::maneuver_type::Enum::ON_RAMP => Self::OnRamp,
Wifi::maneuver_type::Enum::OFF_RAMP => Self::OffRamp,
Wifi::maneuver_type::Enum::FORK => Self::Fork,
Wifi::maneuver_type::Enum::MERGE => Self::Merge,
Wifi::maneuver_type::Enum::ROUNDABOUT_ENTER => Self::RoundaboutEnter,
Wifi::maneuver_type::Enum::ROUNDABOUT_EXIT => Self::RoundaboutExit,
Wifi::maneuver_type::Enum::ROUNDABOUT_ENTER_AND_EXIT => Self::RoundaboutEnterAndExit,
Wifi::maneuver_type::Enum::STRAIGHT => Self::Straight,
Wifi::maneuver_type::Enum::FERRY_BOAT => Self::FerryBoat,
Wifi::maneuver_type::Enum::FERRY_TRAIN => Self::FerryTrain,
Wifi::maneuver_type::Enum::DESTINATION => Self::Destination,
_ => Self::Unknown,
}
}
}
/// A distance unit.
///
/// "Partial" variants correspond to the fractional display forms used by
/// the phone (e.g. "0.5 km" rather than "500 m") — see the `From` impl.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DistanceUnit {
    /// Unrecognized / unmapped protobuf value.
    Unknown,
    Meters,
    Kilometers,
    KilometersPartial,
    Miles,
    MilesPartial,
    Feet,
    Yards,
}
impl From<Wifi::distance_unit::Enum> for DistanceUnit {
fn from(v: Wifi::distance_unit::Enum) -> Self {
match v {
Wifi::distance_unit::Enum::METERS => Self::Meters,
Wifi::distance_unit::Enum::KILOMETERS => Self::Kilometers,
Wifi::distance_unit::Enum::KILOMETERS_PARTIAL => Self::KilometersPartial,
Wifi::distance_unit::Enum::MILES => Self::Miles,
Wifi::distance_unit::Enum::MILES_PARTIAL => Self::MilesPartial,
Wifi::distance_unit::Enum::FEET => Self::Feet,
Wifi::distance_unit::Enum::YARDS => Self::Yards,
_ => Self::Unknown,
}
}
}
/// Navigation status.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum NavStatus {
    /// Unrecognized / unmapped protobuf value.
    Unavailable,
    /// Turn-by-turn guidance is running.
    Active,
    /// No guidance in progress.
    Inactive,
    /// The phone is recalculating the route.
    Rerouting,
}
impl From<Wifi::navigation_status::Enum> for NavStatus {
fn from(v: Wifi::navigation_status::Enum) -> Self {
match v {
Wifi::navigation_status::Enum::ACTIVE => Self::Active,
Wifi::navigation_status::Enum::INACTIVE => Self::Inactive,
Wifi::navigation_status::Enum::REROUTING => Self::Rerouting,
_ => Self::Unavailable,
}
}
}
/// A turn-by-turn navigation instruction.
#[derive(Debug, Clone)]
pub struct TurnInstruction {
    // Name of the street the maneuver applies to.
    pub street_name: String,
    // Left/right/unspecified, as reported by the phone.
    pub direction: ManeuverDirection,
    // Kind of maneuver (turn, ramp, roundabout, …).
    pub maneuver: ManeuverType,
    /// Raw image bytes for the turn arrow (typically 256×256 @ 16-bit).
    pub turn_image: Vec<u8>,
    // Roundabout exit number/angle — presumably only meaningful for the
    // roundabout maneuver types; TODO confirm against the sender.
    pub roundabout_exit_number: u32,
    pub roundabout_exit_angle: u32,
}
/// Distance to the next navigation step.
#[derive(Debug, Clone)]
pub struct DistanceInfo {
    // Distance remaining in meters.
    pub meters: u32,
    // Estimated time to the step, in seconds.
    pub time_to_step_seconds: u32,
    // Display distance in thousandths of `unit` (see parse_nav_frame).
    pub distance_to_step_millis: u32,
    // Unit the phone uses to display the distance.
    pub unit: DistanceUnit,
}
/// A navigation event received from the phone.
#[derive(Debug, Clone)]
pub enum NavEvent {
    /// Guidance started/stopped/rerouting.
    StatusChanged(NavStatus),
    /// A new turn instruction (with optional arrow image).
    TurnInstruction(TurnInstruction),
    /// Updated distance/ETA to the current step.
    DistanceUpdate(DistanceInfo),
}
// ---------------------------------------------------------------------------
// Wire protocol parsing
// ---------------------------------------------------------------------------
/// Parse a navigation-channel frame into a NavEvent.
///
/// Frame layout: a 2-byte big-endian message-type tag followed by a
/// protobuf payload. Returns a descriptive `Err` string for frames that
/// are too short, carry an unknown tag, or fail protobuf decoding.
pub fn parse_nav_frame(frame: &Frame) -> Result<NavEvent, String> {
    // Guard: indexing data[0..2] on a shorter frame would panic.
    if frame.data.len() < 2 {
        return Err(format!("Navigation frame too short: {} bytes", frame.data.len()));
    }
    let ty = u16::from_be_bytes([frame.data[0], frame.data[1]]);
    let Some(msg_type) = Wifi::navigation_channel_message::Enum::from_i32(ty as i32) else {
        return Err(format!("Unknown navigation message 0x{:x}", ty));
    };
    match msg_type {
        Wifi::navigation_channel_message::Enum::STATUS => {
            let m = Wifi::NavigationStatus::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid NavigationStatus: {}", e))?;
            Ok(NavEvent::StatusChanged(m.status().into()))
        }
        Wifi::navigation_channel_message::Enum::TURN_EVENT => {
            let m = Wifi::NavigationTurnEvent::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid NavigationTurnEvent: {}", e))?;
            Ok(NavEvent::TurnInstruction(TurnInstruction {
                street_name: m.street_name().to_string(),
                direction: m.maneuverDirection().into(),
                maneuver: m.maneuverType().into(),
                turn_image: m.turnImage().to_vec(),
                roundabout_exit_number: m.roundaboutExitNumber(),
                roundabout_exit_angle: m.roundaboutExitAngle(),
            }))
        }
        Wifi::navigation_channel_message::Enum::DISTANCE_EVENT => {
            let m = Wifi::NavigationDistanceEvent::parse_from_bytes(&frame.data[2..])
                .map_err(|e| format!("Invalid NavigationDistanceEvent: {}", e))?;
            Ok(NavEvent::DistanceUpdate(DistanceInfo {
                meters: m.meters(),
                time_to_step_seconds: m.timeToStepSeconds(),
                distance_to_step_millis: m.distanceToStepMillis(),
                unit: m.distanceUnit().into(),
            }))
        }
        Wifi::navigation_channel_message::Enum::NONE => {
            Err("NONE navigation message".into())
        }
    }
}
/// Build the NavigationChannel descriptor for service discovery.
pub fn build_nav_channel_descriptor(channel_id: ChannelId) -> Wifi::ChannelDescriptor {
    // Turn-arrow image options: 256×256 at 16-bit color depth.
    let mut img_opts = Wifi::NavigationImageOptions::new();
    img_opts.set_width(256);
    img_opts.set_height(256);
    img_opts.set_colour_depth_bits(16);
    img_opts.set_dunno(255);
    // Navigation channel: image-based turn rendering, updates at most
    // once per second.
    let mut nav = Wifi::NavigationChannel::new();
    nav.set_minimum_interval_ms(1000);
    nav.set_type(Wifi::navigation_turn_type::Enum::IMAGE);
    nav.image_options = ::protobuf::MessageField::some(img_opts);
    // Wrap it in a channel descriptor bound to the given channel id.
    let mut desc = Wifi::ChannelDescriptor::new();
    desc.set_channel_id(channel_id as u32);
    desc.navigation_channel = ::protobuf::MessageField::some(nav);
    desc
}
+381
View File
@@ -0,0 +1,381 @@
//! Android Auto protocol session handler.
//!
//! This module runs the main protocol loop after a TCP connection is
//! established from the phone. It handles:
//! 1. Version handshake
//! 2. TLS negotiation
//! 3. Service discovery
//! 4. Channel message dispatching (navigation, video stub, audio stub, etc.)
use std::io::{Read, Write};
use std::sync::mpsc;
use anyhow::{Context, Result, bail};
use crate::channels;
use crate::common::CommonMessage;
use crate::config::HeadUnitConfig;
use crate::control::{self, ControlMessage};
use crate::decoder::{DecoderConfig, H264Decoder, DISPLAY_WIDTH, DISPLAY_HEIGHT};
use crate::frame::{self, Frame, FrameReader, TlsState};
use crate::navigation::{self, NavEvent};
use crate::proto::Wifi;
/// A decoded video frame ready for display (RGB565).
pub struct VideoFrame {
    /// RGB565 pixel data, DISPLAY_WIDTH × DISPLAY_HEIGHT pixels.
    pub pixels: Vec<u16>,
    /// Monotonic frame number.
    pub frame_number: u64,
}
/// The channel IDs we assign during service discovery.
/// These must match the order we build channel descriptors.
///
/// See `build_channel_descriptors` — the phone addresses frames by these
/// IDs for the rest of the session, so changing one requires changing
/// the descriptor order to match.
#[derive(Debug, Clone, Copy)]
pub struct ChannelMap {
    pub control: u8,       // 0 — always
    pub input: u8,         // 1
    pub sensor: u8,        // 2
    pub video: u8,         // 3
    pub media_audio: u8,   // 4
    pub speech_audio: u8,  // 5
    pub system_audio: u8,  // 6
    pub avinput: u8,       // 7 — microphone input
    pub navigation: u8,    // 8
    pub media_status: u8,  // 9
}
impl Default for ChannelMap {
    /// The canonical channel numbering — must stay in sync with the
    /// descriptor order in `build_channel_descriptors`.
    fn default() -> Self {
        Self {
            control: 0,
            input: 1,
            sensor: 2,
            video: 3,
            media_audio: 4,
            speech_audio: 5,
            system_audio: 6,
            avinput: 7,
            navigation: 8,
            media_status: 9,
        }
    }
}
/// Run the Android Auto protocol session over an established TCP stream.
///
/// `nav_tx` sends navigation events to the UI thread.
/// `video_tx` sends decoded RGB565 frames to the display thread.
///
/// Protocol sequence driven here:
/// 1. send version request, 2. TLS handshake, 3. service discovery,
/// 4. per-channel dispatch (navigation, video, audio stubs).
/// Returns Ok(()) on clean shutdown; any I/O or protocol error unwinds
/// to the caller, which restarts the connection cycle.
pub fn run_session<S: Read + Write>(
    stream: &mut S,
    config: &HeadUnitConfig,
    nav_tx: &mpsc::Sender<NavEvent>,
    video_tx: &mpsc::Sender<VideoFrame>,
) -> Result<()> {
    let ch = ChannelMap::default();
    let mut tls = TlsState::new();
    let mut reader = FrameReader::new();
    // Build channel descriptors for service discovery
    let channel_descs = build_channel_descriptors(&ch);
    // Step 1: Send version request
    log::info!("Sending version request");
    let version_frame = control::version_request_frame();
    // `None` = no TLS yet: the version exchange is always plaintext.
    let wire = version_frame.to_wire_bytes(None);
    stream.write_all(&wire).context("sending version request")?;
    stream.flush()?;
    // Step 2: Main message loop
    let mut video_session: Option<i32> = None;
    let mut video_ack_counter: u32 = 0;
    let mut decoder: Option<H264Decoder> = None;
    loop {
        let frame = reader.read_frame(stream, &mut tls)?;
        let channel_id = frame.header.channel_id;
        let is_control_bit = frame.header.frame.get_control();
        let is_encrypted = frame.header.frame.get_encryption();
        // Log every incoming frame for debugging
        log::info!(
            "⬅️ ch={} ctrl={} enc={} len={} data={:02x?}",
            channel_id, is_control_bit, is_encrypted,
            frame.data.len(),
            &frame.data[..frame.data.len().min(16)]
        );
        // Control channel (channel 0)
        if channel_id == ch.control {
            // Try control message first (non-specific)
            if let Ok(ctrl) = ControlMessage::try_from_frame(&frame) {
                match ctrl {
                    ControlMessage::VersionResponse { major, minor, status } => {
                        // 0xFFFF is the protocol's "incompatible" marker.
                        if status == 0xFFFF {
                            bail!("Version mismatch: phone reports incompatible ({}.{})", major, minor);
                        }
                        log::info!("Phone version: {}.{}", major, minor);
                        // Start TLS handshake
                        if let Some(tls_data) = tls.start_handshake()? {
                            let hs_frame = control::ssl_handshake_frame(tls_data);
                            let wire = hs_frame.to_wire_bytes(None);
                            stream.write_all(&wire)?;
                            stream.flush()?;
                        }
                    }
                    ControlMessage::SslHandshake(data) => {
                        // Feed the phone's TLS records in; any records we
                        // need to send back go out as plaintext frames.
                        if let Some(response) = tls.process_handshake(&data)? {
                            let hs_frame = control::ssl_handshake_frame(response);
                            let wire = hs_frame.to_wire_bytes(None);
                            stream.write_all(&wire)?;
                            stream.flush()?;
                        }
                        if tls.handshake_complete {
                            log::info!("TLS handshake complete");
                            let auth_frame = control::ssl_auth_complete_frame(true);
                            let wire = auth_frame.to_wire_bytes(None);
                            stream.write_all(&wire)?;
                            stream.flush()?;
                        }
                    }
                    ControlMessage::ServiceDiscoveryRequest(_) => {
                        log::info!("Service discovery — advertising {} channels", channel_descs.len());
                        let resp = control::service_discovery_response_frame(config, &channel_descs);
                        // From here on, responses go through the TLS-aware
                        // frame writer (encrypted once handshake is done).
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    ControlMessage::PingRequest(ref ping) => {
                        // Echo the phone's timestamp back.
                        let resp = control::ping_response_frame(ping.timestamp());
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    ControlMessage::PingResponse(_) => { /* ignore */ }
                    ControlMessage::AudioFocusRequest(ref req) => {
                        let resp = control::audio_focus_response_frame(req);
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    ControlMessage::NavigationFocusRequest(_) => {
                        log::info!("Navigation focus requested");
                        let resp = control::nav_focus_response_frame();
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    ControlMessage::ShutdownRequest(ref req) => {
                        log::info!("Shutdown requested: {:?}", req.reason());
                        let resp = control::shutdown_response_frame();
                        frame::write_frame(stream, &resp, &mut tls)?;
                        // Clean exit — the only non-error way out of the loop.
                        return Ok(());
                    }
                    ControlMessage::VoiceSession(_) => {
                        log::info!("Voice session request (ignoring — nav only)");
                    }
                    _ => {
                        log::warn!("Unhandled control message: {:?}", ctrl);
                    }
                }
                continue;
            }
            // Try common message (channel open)
            if let Ok(common) = CommonMessage::try_from_frame(&frame) {
                match common {
                    CommonMessage::ChannelOpenRequest(req) => {
                        log::info!("Channel open request for channel {}", req.channel_id());
                        // NOTE(review): the response is built with the frame's
                        // channel_id (the control channel), not req.channel_id()
                        // — confirm that is the intended addressing.
                        let resp = CommonMessage::open_response_frame(
                            channel_id,
                            Wifi::status::Enum::OK,
                        );
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    _ => {}
                }
                continue;
            }
            log::warn!("Unhandled control frame: {:02x?}", &frame.data[..frame.data.len().min(16)]);
            continue;
        }
        // Try common channel-open messages on any channel
        if let Ok(common) = CommonMessage::try_from_frame(&frame) {
            match common {
                CommonMessage::ChannelOpenRequest(_) => {
                    // Accept every open request on its own channel.
                    let resp = CommonMessage::open_response_frame(
                        channel_id,
                        Wifi::status::Enum::OK,
                    );
                    frame::write_frame(stream, &resp, &mut tls)?;
                }
                _ => {}
            }
            continue;
        }
        // Navigation channel
        if channel_id == ch.navigation {
            match navigation::parse_nav_frame(&frame) {
                Ok(event) => {
                    log::info!("Nav event: {:?}", event);
                    // UI thread may have exited — ignore send failure.
                    let _ = nav_tx.send(event);
                }
                Err(e) => log::warn!("Nav parse error: {}", e),
            }
            continue;
        }
        // Video channel — decode H.264 and send RGB565 frames to display
        if channel_id == ch.video {
            if let Ok(av) = channels::parse_av_frame(&frame) {
                match av {
                    channels::AvMessage::SetupRequest { .. } => {
                        log::info!("Video setup request");
                        let resp = channels::video_setup_response_frame(channel_id);
                        frame::write_frame(stream, &resp, &mut tls)?;
                        // Don't send unsolicited VideoFocusIndication here.
                        // Wait for the phone to send VideoFocusRequest first.
                        // Initialize H.264 decoder (800×480 → 480×320)
                        if decoder.is_none() {
                            match H264Decoder::new(DecoderConfig::default()) {
                                Ok(dec) => {
                                    log::info!("H.264 decoder initialized");
                                    decoder = Some(dec);
                                }
                                Err(e) => {
                                    // Non-fatal: session keeps running, video
                                    // data is simply not decoded.
                                    log::error!("Failed to init H.264 decoder: {:?}", e);
                                }
                            }
                        }
                    }
                    channels::AvMessage::StartIndication { session, .. } => {
                        log::info!("Video start (session={})", session);
                        video_session = Some(session);
                        video_ack_counter = 0;
                        // Don't send premature ACK — wait for actual data
                    }
                    channels::AvMessage::StopIndication => {
                        log::info!("Video stop");
                        video_session = None;
                    }
                    channels::AvMessage::MediaData { data, .. } => {
                        video_ack_counter += 1;
                        // Log periodically to confirm data is flowing
                        if video_ack_counter % 30 == 1 {
                            log::info!(
                                "📹 Video data: frame #{}, chunk {} bytes, total NAL {}",
                                video_ack_counter, data.len(),
                                decoder.as_ref().map(|d| d.nal_len()).unwrap_or(0)
                            );
                        }
                        // Feed H.264 data to decoder
                        if let Some(ref mut dec) = decoder {
                            match dec.decode(&data) {
                                Ok(Some(rgb565)) => {
                                    // Send decoded frame to display thread
                                    let frame = VideoFrame {
                                        pixels: rgb565.to_vec(),
                                        frame_number: dec.frames_decoded(),
                                    };
                                    // Non-blocking send — drop frame if display is behind
                                    let _ = video_tx.send(frame);
                                }
                                Ok(None) => { /* accumulating NAL data */ }
                                Err(e) => {
                                    log::warn!("H.264 decode error: {:?}", e);
                                }
                            }
                        }
                        // Ack EVERY frame — max_unacked=2 means the phone
                        // stops sending after 2 unacked frames
                        if let Some(session) = video_session {
                            let ack = channels::media_ack_frame(
                                channel_id,
                                session,
                                video_ack_counter,
                            );
                            frame::write_frame(stream, &ack, &mut tls)?;
                        }
                    }
                    channels::AvMessage::VideoFocusRequest { focused } => {
                        log::info!("Video focus request: {}", focused);
                        // Always grant focus (`true`) regardless of request.
                        let focus = channels::video_focus_frame(channel_id, true);
                        frame::write_frame(stream, &focus, &mut tls)?;
                    }
                    _ => {}
                }
            }
            continue;
        }
        // Audio channels (stub — ack and discard)
        if channel_id == ch.media_audio
            || channel_id == ch.speech_audio
            || channel_id == ch.system_audio
        {
            if let Ok(av) = channels::parse_av_frame(&frame) {
                match av {
                    channels::AvMessage::SetupRequest { .. } => {
                        let resp = channels::audio_setup_response_frame(channel_id);
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    channels::AvMessage::StartIndication { .. } => {
                        log::debug!("Audio start on channel {} (discarding)", channel_id);
                    }
                    channels::AvMessage::StopIndication => {
                        log::debug!("Audio stop on channel {}", channel_id);
                    }
                    channels::AvMessage::MediaData { .. } => {
                        // Discard audio data silently
                    }
                    _ => {}
                }
            }
            continue;
        }
        // AV Input channel (microphone) — stub: ack setup requests
        if channel_id == ch.avinput {
            if let Ok(av) = channels::parse_av_frame(&frame) {
                match av {
                    channels::AvMessage::SetupRequest { .. } => {
                        let resp = channels::audio_setup_response_frame(channel_id);
                        frame::write_frame(stream, &resp, &mut tls)?;
                    }
                    _ => {}
                }
            }
            continue;
        }
        // Media status channel — ignore for now
        if channel_id == ch.media_status {
            log::debug!("Media status channel data (ignoring)");
            continue;
        }
        // Sensor / Input channels — just handle open requests (already handled above)
        log::debug!(
            "Unhandled frame on channel {}: {:02x?}",
            channel_id,
            &frame.data[..frame.data.len().min(8)]
        );
    }
}
/// Build all channel descriptors advertised during service discovery.
///
/// The control channel (`ch.control` = 0) is implicit in the protocol and
/// therefore gets no descriptor. Order is preserved exactly as registered.
fn build_channel_descriptors(ch: &ChannelMap) -> Vec<Wifi::ChannelDescriptor> {
    let mut descriptors = Vec::with_capacity(9);
    descriptors.push(channels::build_input_channel_descriptor(ch.input));
    descriptors.push(channels::build_sensor_channel_descriptor(ch.sensor));
    descriptors.push(channels::build_video_channel_descriptor(ch.video));
    descriptors.push(channels::build_media_audio_channel_descriptor(ch.media_audio));
    descriptors.push(channels::build_speech_audio_channel_descriptor(ch.speech_audio));
    descriptors.push(channels::build_system_audio_channel_descriptor(ch.system_audio));
    descriptors.push(channels::build_avinput_channel_descriptor(ch.avinput));
    descriptors.push(navigation::build_nav_channel_descriptor(ch.navigation));
    descriptors.push(channels::build_media_info_channel_descriptor(ch.media_status));
    descriptors
}
+92
View File
@@ -0,0 +1,92 @@
//! Minimal FT6336U capacitive touch driver for WT32-SC01 Plus.
//!
//! Reads touch coordinates via I2C. No gesture detection,
//! just raw x/y + press/release.
use esp_idf_hal::i2c::{I2cConfig, I2cDriver};
use esp_idf_hal::units::KiloHertz;
use anyhow::Result;
// 7-bit I2C address of the FT6336U controller.
const FT6336U_ADDR: u8 = 0x38;
// TD_STATUS register: low nibble holds the number of active touch points
// (masked with 0x0F in `poll`).
const REG_NUM_TOUCHES: u8 = 0x02;
// First data byte of touch point 1 (XH). Not referenced by `poll`, which
// burst-reads 5 bytes starting at REG_NUM_TOUCHES instead; kept for reference.
const REG_TOUCH1_XH: u8 = 0x03;
/// A touch event.
///
/// Produced by [`Touch::poll`]. Coordinates are the raw values read from
/// the controller (12-bit, per the masking in `poll`) — no rotation or
/// scaling is applied here.
#[derive(Debug, Clone, Copy)]
pub struct TouchEvent {
    /// Raw X coordinate from the controller.
    pub x: u16,
    /// Raw Y coordinate from the controller.
    pub y: u16,
    /// `true` while the finger is down; `false` exactly once on release
    /// (reported at the last known position).
    pub pressed: bool,
}
/// Simple FT6336U touch driver.
///
/// Polled over I2C; reports only the first touch point, no gestures.
pub struct Touch<'d> {
    /// I2C bus the controller is attached to.
    i2c: I2cDriver<'d>,
    /// Whether the previous poll reported a press — used to synthesize
    /// exactly one release event when the finger lifts (or the bus fails).
    last_pressed: bool,
    /// Last reported X coordinate; attached to the synthesized release event.
    last_x: u16,
    /// Last reported Y coordinate; attached to the synthesized release event.
    last_y: u16,
}
impl<'d> Touch<'d> {
    /// Initialize the touch controller on the given I2C peripheral
    /// (on the WT32-SC01 Plus: SDA=GPIO6, SCL=GPIO5), clocked at 400 kHz.
    ///
    /// # Errors
    /// Returns an error if the underlying I2C driver cannot be created.
    pub fn new(
        i2c: impl esp_idf_hal::i2c::I2c + 'd,
        sda: impl esp_idf_hal::gpio::InputPin + esp_idf_hal::gpio::OutputPin + 'd,
        scl: impl esp_idf_hal::gpio::InputPin + esp_idf_hal::gpio::OutputPin + 'd,
    ) -> Result<Self> {
        let config = I2cConfig::new().baudrate(KiloHertz(400).into());
        let i2c = I2cDriver::new(i2c, sda, scl, &config)?;
        log::info!("FT6336U touch controller initialized");
        Ok(Self {
            i2c,
            last_pressed: false,
            last_x: 0,
            last_y: 0,
        })
    }

    /// Poll for touch. Returns `Some(event)` while a finger is down
    /// (repeated every poll) or exactly once with `pressed == false` when
    /// the finger lifts; `None` when idle.
    pub fn poll(&mut self) -> Option<TouchEvent> {
        // Burst read: TD_STATUS (touch count) + 4 data bytes of touch point 1.
        let mut buf = [0u8; 5];
        // 100 is the driver timeout argument — presumably ticks; TODO confirm units.
        if self
            .i2c
            .write_read(FT6336U_ADDR, &[REG_NUM_TOUCHES], &mut buf, 100)
            .is_err()
        {
            // I2C error — treat as no touch so a stuck "pressed" state
            // cannot survive a flaky bus.
            return self.release_event();
        }
        // Low nibble of TD_STATUS = number of active touch points.
        let num_touches = buf[0] & 0x0F;
        if num_touches == 0 {
            return self.release_event();
        }
        // Parse touch point 1 (buf[1..5] = XH, XL, YH, YL).
        // Coordinates are 12-bit: the upper nibble of XH/YH is masked off.
        let x = (((buf[1] & 0x0F) as u16) << 8) | (buf[2] as u16);
        let y = (((buf[3] & 0x0F) as u16) << 8) | (buf[4] as u16);
        self.last_x = x;
        self.last_y = y;
        self.last_pressed = true;
        Some(TouchEvent {
            x,
            y,
            pressed: true,
        })
    }

    /// Synthesize a single release event at the last known position if a
    /// press was previously reported; otherwise report nothing. Collapses
    /// the release logic that was duplicated between the I2C-error and
    /// no-touch paths of `poll`.
    fn release_event(&mut self) -> Option<TouchEvent> {
        if self.last_pressed {
            self.last_pressed = false;
            Some(TouchEvent {
                x: self.last_x,
                y: self.last_y,
                pressed: false,
            })
        } else {
            None
        }
    }
}