Connect to LiveKit room in capture example

Nathan Sobo 2022-09-06 13:19:17 -06:00 committed by Antonio Scandurra
parent 45d83b557b
commit 5347c7d678
12 changed files with 173 additions and 474 deletions

Cargo.lock generated
View File

@@ -765,12 +765,16 @@ dependencies = [
"foreign-types",
"futures",
"gpui",
"hmac 0.12.1",
"jwt",
"live_kit",
"log",
"media",
"objc",
"parking_lot 0.11.2",
"postage",
"serde",
"sha2 0.10.2",
"simplelog",
]
@@ -2747,6 +2751,21 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5"
[[package]]
name = "jwt"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6204285f77fe7d9784db3fdc449ecce1a0114927a51d5a41c4c7a292011c015f"
dependencies = [
"base64",
"crypto-common",
"digest 0.10.3",
"hmac 0.12.1",
"serde",
"serde_json",
"sha2 0.10.2",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
@@ -2921,6 +2940,8 @@ dependencies = [
name = "live_kit"
version = "0.1.0"
dependencies = [
"core-foundation",
"futures",
"serde",
"serde_json",
]

View File

@@ -18,10 +18,14 @@ core-foundation = "0.9.3"
core-graphics = "0.22.3"
foreign-types = "0.3"
futures = "0.3"
hmac = "0.12"
jwt = "0.16"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
objc = "0.2"
parking_lot = "0.11.1"
postage = { version = "0.4.1", features = ["futures-traits"] }
serde = { version = "1.0", features = ["derive", "rc"] }
sha2 = "0.10"
simplelog = "0.9"
[build-dependencies]

View File

@@ -1,38 +1,7 @@
use std::{env, path::PathBuf, process::Command};
fn main() {
// Find WebRTC.framework as a sibling of the executable when running outside of an application bundle
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
println!("cargo:rustc-link-lib=framework=ScreenCaptureKit");
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=12.3");
let sdk_path = String::from_utf8(
Command::new("xcrun")
.args(&["--sdk", "macosx", "--show-sdk-path"])
.output()
.unwrap()
.stdout,
)
.unwrap();
let sdk_path = sdk_path.trim_end();
println!("cargo:rerun-if-changed=src/bindings.h");
let bindings = bindgen::Builder::default()
.header("src/bindings.h")
.clang_arg(format!("-isysroot{}", sdk_path))
.clang_arg("-xobjective-c")
.allowlist_function("dispatch_queue_create")
.allowlist_type("SCStreamOutputType")
.allowlist_type("SCFrameStatus")
.allowlist_var("SCStreamFrameInfo.*")
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.layout_tests(false)
.generate()
.expect("unable to generate bindings");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("couldn't write dispatch bindings");
// Register exported Objective-C selectors, protocols, etc
println!("cargo:rustc-link-arg=-Wl,-ObjC");
}
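
This commit removes the bindgen step above along with `src/bindings.h` and the generated bindings module (both deleted below). Judging by the hunk header (38 lines shrinking to 7), the build script that survives is roughly this sketch, just the two linker arguments:

```rust
fn main() {
    // Find WebRTC.framework as a sibling of the executable when running
    // outside of an application bundle.
    println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");

    // Register exported Objective-C selectors, protocols, etc.
    println!("cargo:rustc-link-arg=-Wl,-ObjC");
}
```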

View File

@@ -1,2 +0,0 @@
#import <ScreenCaptureKit/SCStream.h>
#import <dispatch/dispatch.h>

View File

@@ -1,8 +0,0 @@
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(unused)]
use objc::*;
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));

View File

@@ -1,178 +0,0 @@
use anyhow::Result;
use core_foundation::base::{OSStatus, TCFType};
use media::{
core_media::{CMSampleBufferRef, CMSampleTimingInfo, CMVideoCodecType},
core_video::CVImageBuffer,
video_toolbox::{VTCompressionSession, VTEncodeInfoFlags},
};
use std::ffi::c_void;
pub struct CompressionSession<F> {
session: VTCompressionSession,
output_callback: Box<F>,
}
impl<F: 'static + Send + FnMut(OSStatus, VTEncodeInfoFlags, CMSampleBufferRef)>
CompressionSession<F>
{
pub fn new(width: usize, height: usize, codec: CMVideoCodecType, callback: F) -> Result<Self> {
let callback = Box::new(callback);
let session = VTCompressionSession::new(
width,
height,
codec,
Some(Self::output_callback),
callback.as_ref() as *const _ as *const c_void,
)?;
Ok(Self {
session,
output_callback: callback,
})
}
pub fn encode_frame(&self, buffer: &CVImageBuffer, timing: CMSampleTimingInfo) -> Result<()> {
self.session.encode_frame(
buffer.as_concrete_TypeRef(),
timing.presentationTimeStamp,
timing.duration,
)
}
extern "C" fn output_callback(
output_callback_ref_con: *mut c_void,
_: *mut c_void,
status: OSStatus,
flags: VTEncodeInfoFlags,
sample_buffer: CMSampleBufferRef,
) {
let callback = unsafe { &mut *(output_callback_ref_con as *mut F) };
callback(status, flags, sample_buffer);
}
}
// unsafe extern "C" fn output(
// output_callback_ref_con: *mut c_void,
// source_frame_ref_con: *mut c_void,
// status: OSStatus,
// info_flags: VTEncodeInfoFlags,
// sample_buffer: CMSampleBufferRef,
// ) {
// if status != 0 {
// println!("error encoding frame, code: {}", status);
// return;
// }
// let sample_buffer = CMSampleBuffer::wrap_under_get_rule(sample_buffer);
// let mut is_iframe = false;
// let attachments = sample_buffer.attachments();
// if let Some(attachments) = attachments.first() {
// is_iframe = attachments
// .find(bindings::kCMSampleAttachmentKey_NotSync as CFStringRef)
// .map_or(true, |not_sync| {
// CFBooleanGetValue(*not_sync as CFBooleanRef)
// });
// }
// const START_CODE: [u8; 4] = [0x00, 0x00, 0x00, 0x01];
// if is_iframe {
// let format_description = sample_buffer.format_description();
// for ix in 0..format_description.h264_parameter_set_count() {
// let parameter_set = format_description.h264_parameter_set_at_index(ix);
// stream.extend(START_CODE);
// stream.extend(parameter_set);
// }
// }
// println!("YO!");
// }
// static void videoFrameFinishedEncoding(void *outputCallbackRefCon,
// void *sourceFrameRefCon,
// OSStatus status,
// VTEncodeInfoFlags infoFlags,
// CMSampleBufferRef sampleBuffer) {
// // Check if there were any errors encoding
// if (status != noErr) {
// NSLog(@"Error encoding video, err=%lld", (int64_t)status);
// return;
// }
// // In this example we will use a NSMutableData object to store the
// // elementary stream.
// NSMutableData *elementaryStream = [NSMutableData data];
// // Find out if the sample buffer contains an I-Frame.
// // If so we will write the SPS and PPS NAL units to the elementary stream.
// BOOL isIFrame = NO;
// CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, 0);
// if (CFArrayGetCount(attachmentsArray)) {
// CFBooleanRef notSync;
// CFDictionaryRef dict = CFArrayGetValueAtIndex(attachmentsArray, 0);
// BOOL keyExists = CFDictionaryGetValueIfPresent(dict,
// kCMSampleAttachmentKey_NotSync,
// (const void **)&notSync);
// // An I-Frame is a sync frame
// isIFrame = !keyExists || !CFBooleanGetValue(notSync);
// }
// // This is the start code that we will write to
// // the elementary stream before every NAL unit
// static const size_t startCodeLength = 4;
// static const uint8_t startCode[] = {0x00, 0x00, 0x00, 0x01};
// // Write the SPS and PPS NAL units to the elementary stream before every I-Frame
// if (isIFrame) {
// CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
// // Find out how many parameter sets there are
// size_t numberOfParameterSets;
// CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
// 0, NULL, NULL,
// &numberOfParameterSets,
// NULL);
// // Write each parameter set to the elementary stream
// for (int i = 0; i < numberOfParameterSets; i++) {
// const uint8_t *parameterSetPointer;
// size_t parameterSetLength;
// CMVideoFormatDescriptionGetH264ParameterSetAtIndex(description,
// i,
// &parameterSetPointer,
// &parameterSetLength,
// NULL, NULL);
// // Write the parameter set to the elementary stream
// [elementaryStream appendBytes:startCode length:startCodeLength];
// [elementaryStream appendBytes:parameterSetPointer length:parameterSetLength];
// }
// }
// // Get a pointer to the raw AVCC NAL unit data in the sample buffer
// size_t blockBufferLength;
// uint8_t *bufferDataPointer = NULL;
// CMBlockBufferGetDataPointer(CMSampleBufferGetDataBuffer(sampleBuffer),
// 0,
// NULL,
// &blockBufferLength,
// (char **)&bufferDataPointer);
// // Loop through all the NAL units in the block buffer
// // and write them to the elementary stream with
// // start codes instead of AVCC length headers
// size_t bufferOffset = 0;
// static const int AVCCHeaderLength = 4;
// while (bufferOffset < blockBufferLength - AVCCHeaderLength) {
// // Read the NAL unit length
// uint32_t NALUnitLength = 0;
// memcpy(&NALUnitLength, bufferDataPointer + bufferOffset, AVCCHeaderLength);
// // Convert the length value from Big-endian to Little-endian
// NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
// // Write start code to the elementary stream
// [elementaryStream appendBytes:startCode length:startCodeLength];
// // Write the NAL unit without the AVCC length header to the elementary stream
// [elementaryStream appendBytes:bufferDataPointer + bufferOffset + AVCCHeaderLength
// length:NALUnitLength];
// // Move to the next NAL unit in the block buffer
// bufferOffset += AVCCHeaderLength + NALUnitLength;
// }
// }
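
The deleted `CompressionSession` above rests on a common FFI pattern: box a Rust closure, pass its address through the C API's opaque context pointer, and recover the concrete closure type inside an `extern "C"` trampoline (`output_callback` here). A minimal self-contained sketch of that pattern, with a hypothetical C-style API standing in for VideoToolbox:

```rust
use std::ffi::c_void;

// Hypothetical stand-in for a C API taking a callback plus an opaque context
// pointer (VTCompressionSession plays this role in the deleted code).
fn c_api_invoke(cb: extern "C" fn(*mut c_void, i32), ctx: *mut c_void) {
    cb(ctx, 0);
}

// Trampoline: recover the concrete closure type from the opaque pointer.
extern "C" fn trampoline<F: FnMut(i32)>(ctx: *mut c_void, status: i32) {
    let callback = unsafe { &mut *(ctx as *mut F) };
    callback(status);
}

fn run_with_callback<F: FnMut(i32)>(callback: F) {
    // Box the closure so it has a stable address to hand across the boundary.
    let mut callback = Box::new(callback);
    c_api_invoke(trampoline::<F>, &mut *callback as *mut F as *mut c_void);
    // The box must outlive every callback invocation; CompressionSession
    // keeps it alive by storing it in its `output_callback` field.
}

fn main() {
    run_with_callback(|status| println!("encode status: {}", status));
}
```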

View File

@@ -0,0 +1,71 @@
use anyhow::Result;
use hmac::{Hmac, Mac};
use jwt::SignWithKey;
use serde::Serialize;
use sha2::Sha256;
use std::{
ops::Add,
time::{Duration, SystemTime, UNIX_EPOCH},
};
static DEFAULT_TTL: Duration = Duration::from_secs(6 * 60 * 60); // 6 hours
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct ClaimGrants<'a> {
iss: &'a str,
sub: &'a str,
iat: u64,
exp: u64,
nbf: u64,
jwtid: &'a str,
video: VideoGrant<'a>,
}
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct VideoGrant<'a> {
room_create: Option<bool>,
room_join: Option<bool>,
room_list: Option<bool>,
room_record: Option<bool>,
room_admin: Option<bool>,
room: Option<&'a str>,
can_publish: Option<bool>,
can_subscribe: Option<bool>,
can_publish_data: Option<bool>,
hidden: Option<bool>,
recorder: Option<bool>,
}
pub fn create_token(
api_key: &str,
secret_key: &str,
room_name: &str,
participant_name: &str,
) -> Result<String> {
let secret_key: Hmac<Sha256> = Hmac::new_from_slice(secret_key.as_bytes())?;
let now = SystemTime::now();
let claims = ClaimGrants {
iss: api_key,
sub: participant_name,
iat: now.duration_since(UNIX_EPOCH).unwrap().as_secs(),
exp: now
.add(DEFAULT_TTL)
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs(),
nbf: 0,
jwtid: participant_name,
video: VideoGrant {
room: Some(room_name),
room_join: Some(true),
can_publish: Some(true),
can_subscribe: Some(true),
..Default::default()
},
};
Ok(claims.sign_with_key(&secret_key)?)
}
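
The capture example below calls this helper with credentials taken from the environment. A usage sketch (the env-var names and room/participant strings mirror `main.rs` in this commit):

```rust
// Usage sketch for create_token; mirrors the capture example's setup.
fn make_test_token() -> anyhow::Result<String> {
    let key = std::env::var("LIVE_KIT_KEY")?;
    let secret = std::env::var("LIVE_KIT_SECRET")?;
    // Grants roomJoin, canPublish, and canSubscribe on "test-room",
    // expiring after DEFAULT_TTL (six hours).
    live_kit_token::create_token(&key, &secret, "test-room", "test-participant")
}
```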

View File

@@ -1,20 +1,5 @@
mod bindings;
mod compression_session;
mod live_kit_token;
use crate::{bindings::SCStreamOutputType, compression_session::CompressionSession};
use block::ConcreteBlock;
use byteorder::{BigEndian, ReadBytesExt};
use bytes::BytesMut;
use cocoa::{
base::{id, nil, YES},
foundation::{NSArray, NSString, NSUInteger},
};
use core_foundation::{
base::{CFRelease, TCFType},
number::{CFBooleanGetValue, CFBooleanRef, CFNumberRef},
string::CFStringRef,
};
use futures::StreamExt;
use gpui::{
actions,
elements::{Canvas, *},
@@ -24,37 +9,12 @@ use gpui::{
};
use live_kit::Room;
use log::LevelFilter;
use media::{
core_media::{
kCMSampleAttachmentKey_NotSync, kCMVideoCodecType_H264, CMSampleBuffer, CMSampleBufferRef,
CMTimeMake,
},
core_video::{self, CVImageBuffer},
video_toolbox::VTCompressionSession,
};
use objc::{
class,
declare::ClassDecl,
msg_send,
runtime::{Class, Object, Sel},
sel, sel_impl,
};
use parking_lot::Mutex;
use media::core_video::CVImageBuffer;
use simplelog::SimpleLogger;
use std::{ffi::c_void, ptr, slice, str, sync::Arc};
#[allow(non_upper_case_globals)]
const NSUTF8StringEncoding: NSUInteger = 4;
actions!(capture, [Quit]);
fn main() {
println!("Creating room...");
let room = Room::new();
println!("Dropping room...");
drop(room);
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
gpui::App::new(()).unwrap().run(|cx| {
@@ -70,12 +30,32 @@ fn main() {
}],
}]);
cx.add_window(Default::default(), |cx| ScreenCaptureView::new(cx));
let live_kit_key = std::env::var("LIVE_KIT_KEY").unwrap();
let live_kit_secret = std::env::var("LIVE_KIT_SECRET").unwrap();
let token = live_kit_token::create_token(
&live_kit_key,
&live_kit_secret,
"test-room",
"test-participant",
)
.unwrap();
let room = live_kit::Room::new();
cx.spawn(|cx| async move {
println!("connecting...");
room.connect("wss://zed.livekit.cloud", &token).await;
println!("connected!");
drop(room);
})
.detach();
// cx.add_window(Default::default(), |cx| ScreenCaptureView::new(cx));
});
}
struct ScreenCaptureView {
image_buffer: Option<core_video::CVImageBuffer>,
image_buffer: Option<CVImageBuffer>,
}
impl gpui::Entity for ScreenCaptureView {
@@ -83,188 +63,7 @@ impl gpui::Entity for ScreenCaptureView {
}
impl ScreenCaptureView {
pub fn new(cx: &mut ViewContext<Self>) -> Self {
let (image_buffer_tx, mut image_buffer_rx) =
postage::watch::channel::<Option<CVImageBuffer>>();
let image_buffer_tx = Arc::new(Mutex::new(image_buffer_tx));
unsafe {
let block = ConcreteBlock::new(move |content: id, error: id| {
if !error.is_null() {
println!(
"ERROR {}",
string_from_objc(msg_send![error, localizedDescription])
);
return;
}
let applications: id = msg_send![content, applications];
let displays: id = msg_send![content, displays];
let display: id = displays.objectAtIndex(0);
let display_width: usize = msg_send![display, width];
let display_height: usize = msg_send![display, height];
let mut compression_buffer = BytesMut::new();
// let compression_session = CompressionSession::new(
// display_width,
// display_height,
// kCMVideoCodecType_H264,
// move |status, flags, sample_buffer| {
// if status != 0 {
// println!("error encoding frame, code: {}", status);
// return;
// }
// let sample_buffer = CMSampleBuffer::wrap_under_get_rule(sample_buffer);
// let mut is_iframe = false;
// let attachments = sample_buffer.attachments();
// if let Some(attachments) = attachments.first() {
// is_iframe = attachments
// .find(kCMSampleAttachmentKey_NotSync as CFStringRef)
// .map_or(true, |not_sync| {
// CFBooleanGetValue(*not_sync as CFBooleanRef)
// });
// }
// const START_CODE: [u8; 4] = [0x00, 0x00, 0x00, 0x01];
// if is_iframe {
// let format_description = sample_buffer.format_description();
// for ix in 0..format_description.h264_parameter_set_count() {
// let parameter_set =
// format_description.h264_parameter_set_at_index(ix).unwrap();
// compression_buffer.extend_from_slice(&START_CODE);
// compression_buffer.extend_from_slice(parameter_set);
// let nal_unit = compression_buffer.split();
// }
// }
// let data = sample_buffer.data();
// let mut data = data.bytes();
// const AVCC_HEADER_LENGTH: usize = 4;
// while data.len() - AVCC_HEADER_LENGTH > 0 {
// let nal_unit_len = match data.read_u32::<BigEndian>() {
// Ok(len) => len as usize,
// Err(error) => {
// log::error!("error decoding nal unit length: {}", error);
// return;
// }
// };
// compression_buffer.extend_from_slice(&START_CODE);
// compression_buffer.extend_from_slice(&data[..nal_unit_len as usize]);
// data = &data[nal_unit_len..];
// let nal_unit = compression_buffer.split();
// }
// },
// )
// .unwrap();
let mut decl = ClassDecl::new("CaptureOutput", class!(NSObject)).unwrap();
decl.add_ivar::<*mut c_void>("callback");
decl.add_method(
sel!(stream:didOutputSampleBuffer:ofType:),
sample_output as extern "C" fn(&Object, Sel, id, id, SCStreamOutputType),
);
let capture_output_class = decl.register();
let output: id = msg_send![capture_output_class, alloc];
let output: id = msg_send![output, init];
let surface_tx = image_buffer_tx.clone();
let callback = Box::new(move |buffer: CMSampleBufferRef| {
let buffer = CMSampleBuffer::wrap_under_get_rule(buffer);
let attachments = buffer.attachments();
let attachments = attachments.first().expect("no attachments for sample");
let string = bindings::SCStreamFrameInfoStatus.0 as CFStringRef;
let status = core_foundation::number::CFNumber::wrap_under_get_rule(
*attachments.get(string) as CFNumberRef,
)
.to_i64()
.expect("invalid frame info status");
if status != bindings::SCFrameStatus_SCFrameStatusComplete {
println!("received incomplete frame");
return;
}
let timing_info = buffer.sample_timing_info(0).unwrap();
let image_buffer = buffer.image_buffer();
// compression_session
// .encode_frame(&image_buffer, timing_info)
// .unwrap();
*surface_tx.lock().borrow_mut() = Some(image_buffer);
}) as Box<dyn FnMut(CMSampleBufferRef)>;
let callback = Box::into_raw(Box::new(callback));
(*output).set_ivar("callback", callback as *mut c_void);
let filter: id = msg_send![class!(SCContentFilter), alloc];
let filter: id = msg_send![filter, initWithDisplay: display includingApplications: applications exceptingWindows: nil];
// let filter: id = msg_send![filter, initWithDesktopIndependentWindow: window];
let config: id = msg_send![class!(SCStreamConfiguration), alloc];
let config: id = msg_send![config, init];
let _: () = msg_send![config, setWidth: display_width * 2];
let _: () = msg_send![config, setHeight: display_height * 2];
let _: () = msg_send![config, setMinimumFrameInterval: CMTimeMake(1, 60)];
let _: () = msg_send![config, setQueueDepth: 6];
let _: () = msg_send![config, setShowsCursor: YES];
let _: () = msg_send![
config,
setPixelFormat: media::core_video::kCVPixelFormatType_32BGRA
];
let stream: id = msg_send![class!(SCStream), alloc];
let stream: id = msg_send![stream, initWithFilter: filter configuration: config delegate: output];
let error: id = nil;
let queue = bindings::dispatch_queue_create(
ptr::null(),
bindings::NSObject(ptr::null_mut()),
);
let _: () = msg_send![stream,
addStreamOutput: output type: bindings::SCStreamOutputType_SCStreamOutputTypeScreen
sampleHandlerQueue: queue
error: &error
];
let start_capture_completion = ConcreteBlock::new(move |error: id| {
if !error.is_null() {
println!(
"error starting capture... error? {}",
string_from_objc(msg_send![error, localizedDescription])
);
return;
}
println!("starting capture");
});
assert!(!stream.is_null());
let _: () = msg_send![
stream,
startCaptureWithCompletionHandler: start_capture_completion
];
});
let _: id = msg_send![
class!(SCShareableContent),
getShareableContentWithCompletionHandler: block
];
}
cx.spawn_weak(|this, mut cx| async move {
while let Some(image_buffer) = image_buffer_rx.next().await {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.image_buffer = image_buffer;
cx.notify();
})
} else {
break;
}
}
})
.detach();
pub fn new(_: &mut ViewContext<Self>) -> Self {
Self { image_buffer: None }
}
}
@@ -288,32 +87,6 @@ impl gpui::View for ScreenCaptureView {
}
}
pub unsafe fn string_from_objc(string: id) -> String {
if string.is_null() {
Default::default()
} else {
let len = msg_send![string, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
let bytes = string.UTF8String() as *const u8;
str::from_utf8(slice::from_raw_parts(bytes, len))
.unwrap()
.to_string()
}
}
extern "C" fn sample_output(
this: &Object,
_: Sel,
_stream: id,
buffer: id,
_kind: SCStreamOutputType,
) {
unsafe {
let callback = *this.get_ivar::<*mut c_void>("callback");
let callback = &mut *(callback as *mut Box<dyn FnMut(CMSampleBufferRef)>);
(*callback)(buffer as CMSampleBufferRef);
}
}
fn quit(_: &Quit, cx: &mut gpui::MutableAppContext) {
cx.platform().quit();
}

View File

@@ -9,6 +9,8 @@ path = "src/live_kit.rs"
doctest = false
[dependencies]
core-foundation = "0.9.3"
futures = "0.3"
[build-dependencies]
serde = { version = "1.0", features = ["derive", "rc"] }

View File

@@ -10,3 +10,14 @@ public func LKRoomCreate() -> UnsafeMutableRawPointer {
public func LKRoomDestroy(ptr: UnsafeRawPointer) {
let _ = Unmanaged<Room>.fromOpaque(ptr).takeRetainedValue();
}
@_cdecl("LKRoomConnect")
public func LKRoomConnect(room: UnsafeRawPointer, url: CFString, token: CFString, callback: @escaping @convention(c) (UnsafeRawPointer) -> Void, callback_data: UnsafeRawPointer) {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue();
room.connect(url as String, token as String).then { _ in
callback(callback_data);
}.catch { error in
print(error);
};
}

View File

@@ -37,6 +37,7 @@ fn main() {
}
fn build_bridge(swift_target: &SwiftTarget) {
println!("cargo:rerun-if-changed={}", SWIFT_PACKAGE_NAME);
let swift_package_root = swift_package_root();
if !Command::new("swift")
.args(&["build", "-c", &env::var("PROFILE").unwrap()])

View File

@@ -1,8 +1,20 @@
use core_foundation::{
base::TCFType,
string::{CFString, CFStringRef},
};
use futures::{channel::oneshot, Future};
use std::ffi::c_void;
extern "C" {
fn LKRoomCreate() -> *const c_void;
fn LKRoomDestroy(ptr: *const c_void);
fn LKRoomDestroy(room: *const c_void);
fn LKRoomConnect(
room: *const c_void,
url: CFStringRef,
token: CFStringRef,
callback: extern "C" fn(*mut c_void) -> (),
callback_data: *mut c_void,
);
}
pub struct Room {
@@ -15,6 +27,29 @@ impl Room {
native_room: unsafe { LKRoomCreate() },
}
}
pub fn connect(&self, url: &str, token: &str) -> impl Future<Output = ()> {
let url = CFString::new(url);
let token = CFString::new(token);
let (tx, rx) = oneshot::channel();
extern "C" fn did_connect(tx: *mut c_void) {
let tx = unsafe { Box::from_raw(tx as *mut oneshot::Sender<()>) };
let _ = tx.send(());
}
unsafe {
LKRoomConnect(
self.native_room,
url.as_concrete_TypeRef(),
token.as_concrete_TypeRef(),
did_connect,
Box::into_raw(Box::new(tx)) as *mut c_void,
)
}
async { rx.await.unwrap() }
}
}
impl Drop for Room {
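
The listing cuts off inside the `Drop` impl, which presumably releases the Swift object via `LKRoomDestroy`. For context, a sketch of how the new `connect` future is meant to be driven; the URL and token flow match the capture example's `main.rs` above:

```rust
// Sketch: driving Room::connect from an async context, as main.rs does.
async fn connect_to_room(token: String) {
    let room = live_kit::Room::new();
    // `connect` resolves once the Swift bridge invokes the completion
    // callback, which fires the oneshot sender boxed into callback_data.
    room.connect("wss://zed.livekit.cloud", &token).await;
    println!("connected!");
}
```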