Commit

Merge remote-tracking branch 'origin/release/2022.5.1' into HEAD
melpon committed Sep 24, 2022
2 parents 7070d7e + 163e938 commit e27fdb0
Showing 11 changed files with 229 additions and 53 deletions.
14 changes: 14 additions & 0 deletions CHANGES.md
@@ -11,6 +11,20 @@

## develop

## 2022.5.1 (2022-09-24)

- [UPDATE] Bump Sora C++ SDK to `2022.12.1`
- @melpon
- [FIX] Fix frame info from the camera being delivered in a downscaled state
- @melpon

## 2022.5.0 (2022-09-22)

- [ADD] Add a callback that receives frame info from the camera
- @melpon
- [ADD] Make the camera device's FPS configurable
- @melpon

## 2022.4.0

- [CHANGE] Add a setting to turn off Bitcode for iOS builds
34 changes: 34 additions & 0 deletions Sora/Sora.cs
@@ -86,6 +86,7 @@ public class Config
public string VideoCapturerDevice = "";
public int VideoWidth = 640;
public int VideoHeight = 480;
public int VideoFps = 30;
public VideoCodecType VideoCodecType = VideoCodecType.VP9;
public int VideoBitRate = 0;
public bool UnityAudioInput = false;
@@ -138,6 +139,7 @@ public class Config
GCHandle onDisconnectHandle;
GCHandle onDataChannelHandle;
GCHandle onHandleAudioHandle;
GCHandle onCapturerFrameHandle;
UnityEngine.Rendering.CommandBuffer commandBuffer;
UnityEngine.Camera unityCamera;

@@ -188,6 +190,11 @@ public void Dispose()
{
onHandleAudioHandle.Free();
}

if (onCapturerFrameHandle.IsAllocated)
{
onCapturerFrameHandle.Free();
}
}

public Sora()
@@ -248,6 +255,7 @@ public void Connect(Config config)
cc.video_capturer_device = config.VideoCapturerDevice;
cc.video_width = config.VideoWidth;
cc.video_height = config.VideoHeight;
cc.video_fps = config.VideoFps;
cc.video_codec_type = config.VideoCodecType.ToString();
cc.video_bit_rate = config.VideoBitRate;
cc.unity_audio_input = config.UnityAudioInput;
@@ -535,6 +543,30 @@ public Action<short[], int, int> OnHandleAudio
}
}

private delegate void CapturerFrameCallbackDelegate(string data, IntPtr userdata);

[AOT.MonoPInvokeCallback(typeof(CapturerFrameCallbackDelegate))]
static private void CapturerFrameCallback(string data, IntPtr userdata)
{
var callback = GCHandle.FromIntPtr(userdata).Target as Action<SoraConf.VideoFrame>;
var frame = Jsonif.Json.FromJson<SoraConf.VideoFrame>(data);
callback(frame);
}

public Action<SoraConf.VideoFrame> OnCapturerFrame
{
set
{
if (onCapturerFrameHandle.IsAllocated)
{
onCapturerFrameHandle.Free();
}

onCapturerFrameHandle = GCHandle.Alloc(value);
sora_set_on_capturer_frame(p, CapturerFrameCallback, GCHandle.ToIntPtr(onCapturerFrameHandle));
}
}

private delegate void StatsCallbackDelegate(string json, IntPtr userdata);

[AOT.MonoPInvokeCallback(typeof(StatsCallbackDelegate))]
@@ -689,6 +721,8 @@ public static bool IsH264Supported()
[DllImport(DllName)]
private static extern void sora_set_on_handle_audio(IntPtr p, HandleAudioCallbackDelegate on_handle_audio, IntPtr userdata);
[DllImport(DllName)]
private static extern void sora_set_on_capturer_frame(IntPtr p, CapturerFrameCallbackDelegate on_capturer_frame, IntPtr userdata);
[DllImport(DllName)]
private static extern void sora_get_stats(IntPtr p, StatsCallbackDelegate on_get_stats, IntPtr userdata);
[DllImport(DllName)]
private static extern void sora_send_message(IntPtr p, string label, [In] byte[] buf, int size);
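Taken together, the Sora.cs changes surface two application-facing additions: the `Config.VideoFps` field and the `OnCapturerFrame` property. The sketch below shows one way an application might use them; it is illustrative only, sticks to members visible in this diff, and leaves the signaling settings out. Judging from the sora.cpp changes further down, the callback appears to fire on the capture thread rather than the Unity main thread, so the handler keeps to thread-safe work.

```csharp
// Hypothetical usage sketch of the APIs added in this commit (not part of the diff).
var sora = new Sora();

var config = new Sora.Config();
// ... signaling settings (URL, channel ID, role, ...) omitted ...
config.VideoWidth = 1280;
config.VideoHeight = 720;
config.VideoFps = 15;  // new: camera FPS is configurable instead of the fixed 30

// New: per-frame metadata from the capturer, parsed into SoraConf.VideoFrame.
sora.OnCapturerFrame = (SoraConf.VideoFrame frame) =>
{
    // Field names follow sora_conf.proto; the generated C# accessors may differ.
    var buf = frame.video_frame_buffer;
    UnityEngine.Debug.Log(
        $"capturer frame: {buf.width}x{buf.height}, type={buf.type}, rotation={frame.rotation}");
};

sora.Connect(config);
// ... and sora.Dispose() when done, which also frees the callback's GCHandle.
```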
4 changes: 2 additions & 2 deletions VERSION
@@ -1,5 +1,5 @@
SORA_UNITY_SDK_VERSION=2022.4.0-hololens2.0
SORA_CPP_SDK_VERSION=2022.11.1-hololens2.0
SORA_UNITY_SDK_VERSION=2022.5.1-hololens2.0
SORA_CPP_SDK_VERSION=2022.12.1-hololens2.0
WEBRTC_BUILD_VERSION=m105.5195.0.0-hololens2.0
BOOST_VERSION=1.80.0
CMAKE_VERSION=3.23.1
39 changes: 39 additions & 0 deletions proto/sora_conf.proto
@@ -22,4 +22,43 @@ enum ErrorCode {
PEER_CONNECTION_STATE_FAILED = 8;
// Failed somewhere during the ICE candidate exchange
ICE_FAILED = 9;
}

message VideoFrameBuffer {
enum Type {
kNative = 0;
kI420 = 1;
kI420A = 2;
kI422 = 3;
kI444 = 4;
kI010 = 5;
kI210 = 6;
kNV12 = 7;
}
int64 baseptr = 1;
Type type = 2;
int32 width = 3;
int32 height = 4;

int32 i420_stride_y = 110;
int32 i420_stride_u = 111;
int32 i420_stride_v = 112;
int64 i420_data_y = 113;
int64 i420_data_u = 114;
int64 i420_data_v = 115;

int32 nv12_stride_y = 170;
int32 nv12_stride_uv = 171;
int64 nv12_data_y = 172;
int64 nv12_data_uv = 173;
}

message VideoFrame {
int64 baseptr = 1;
int32 id = 2;
int64 timestamp_us = 3;
uint32 timestamp = 4;
int64 ntp_time_ms = 5;
int32 rotation = 6;
VideoFrameBuffer video_frame_buffer = 7;
}
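The new `VideoFrameBuffer` message carries raw native pointers and strides (`int64`/`int32` fields) rather than pixel data. Below is a hedged C# sketch of copying the I420 Y plane out of a received frame; it assumes the generated C# type mirrors the proto field and enum names, and that the pointers stay valid only while the `OnCapturerFrame` callback that delivered the frame is still running, since they reference the live `webrtc::VideoFrame` (see the serialization in sora.cpp further down).

```csharp
using System.Runtime.InteropServices;

static class CapturerFrameUtil
{
    // Copies the Y plane of an I420 capturer frame into managed memory.
    // Call only from inside the OnCapturerFrame handler; the native pointers
    // are not expected to outlive the callback.
    public static byte[] CopyI420YPlane(SoraConf.VideoFrame frame)
    {
        var buf = frame.video_frame_buffer;
        if (buf.type != SoraConf.VideoFrameBuffer.Type.kI420)
        {
            return null;  // only the i420_* fields are populated for I420 buffers
        }

        // One stride-sized row per pixel row.
        var plane = new byte[buf.i420_stride_y * buf.height];
        Marshal.Copy(new System.IntPtr(buf.i420_data_y), plane, 0, plane.Length);
        return plane;
    }
}
```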
43 changes: 22 additions & 21 deletions proto/sora_conf_internal.proto
@@ -48,27 +48,28 @@ message ConnectConfig {
bool audio = 21;
int32 video_width = 22;
int32 video_height = 23;
string video_codec_type = 24;
int32 video_bit_rate = 25;
bool unity_audio_input = 26;
bool unity_audio_output = 27;
string audio_recording_device = 28;
string audio_playout_device = 29;
string audio_codec_type = 30;
int32 audio_bit_rate = 31;
bool enable_data_channel_signaling = 32;
bool data_channel_signaling = 33;
int32 data_channel_signaling_timeout = 34;
bool enable_ignore_disconnect_websocket = 35;
bool ignore_disconnect_websocket = 36;
int32 disconnect_wait_timeout = 37;
repeated DataChannel data_channels = 38;
bool insecure = 39;
string bundle_id = 40;
string proxy_url = 41;
string proxy_username = 42;
string proxy_password = 43;
string proxy_agent = 44;
int32 video_fps = 24;
string video_codec_type = 25;
int32 video_bit_rate = 26;
bool unity_audio_input = 27;
bool unity_audio_output = 28;
string audio_recording_device = 29;
string audio_playout_device = 30;
string audio_codec_type = 31;
int32 audio_bit_rate = 32;
bool enable_data_channel_signaling = 33;
bool data_channel_signaling = 34;
int32 data_channel_signaling_timeout = 35;
bool enable_ignore_disconnect_websocket = 36;
bool ignore_disconnect_websocket = 37;
int32 disconnect_wait_timeout = 38;
repeated DataChannel data_channels = 39;
bool insecure = 40;
string bundle_id = 41;
string proxy_url = 42;
string proxy_username = 43;
string proxy_password = 44;
string proxy_agent = 45;

bool enable_mrc_hologram_composition_enabled = 100;
bool mrc_hologram_composition_enabled = 101;
67 changes: 58 additions & 9 deletions src/sora.cpp
@@ -57,6 +57,7 @@ Sora::~Sora() {
static_cast<sora::AndroidCapturer*>(capturer_.get())->Stop();
}
#endif
capturer_sink_ = nullptr;
capturer_ = nullptr;
unity_adm_ = nullptr;

@@ -117,6 +118,10 @@ void Sora::SetOnDisconnect(
void Sora::SetOnDataChannel(std::function<void(std::string)> on_data_channel) {
on_data_channel_ = std::move(on_data_channel);
}
void Sora::SetOnCapturerFrame(
std::function<void(std::string)> on_capturer_frame) {
on_capturer_frame_ = std::move(on_capturer_frame);
}

void Sora::DispatchEvents() {
auto self = shared_from_this();
@@ -287,14 +292,52 @@ void Sora::DoConnect(const sora_conf::internal::ConnectConfig& cc,
renderer_.reset(new UnityRenderer());

if (cc.role == "sendonly" || cc.role == "sendrecv") {
std::function<void(const webrtc::VideoFrame& frame)> on_frame;
if (on_capturer_frame_) {
on_frame = [on_frame =
on_capturer_frame_](const webrtc::VideoFrame& frame) {
sora_conf::VideoFrame f;
f.baseptr = reinterpret_cast<int64_t>(&frame);
f.id = frame.id();
f.timestamp_us = frame.timestamp_us();
f.timestamp = frame.timestamp();
f.ntp_time_ms = frame.ntp_time_ms();
f.rotation = (int)frame.rotation();
auto& v = f.video_frame_buffer;
auto vfb = frame.video_frame_buffer();
v.baseptr = reinterpret_cast<int64_t>(vfb.get());
v.type = (sora_conf::VideoFrameBuffer::Type)vfb->type();
v.width = vfb->width();
v.height = vfb->height();
if (vfb->type() == webrtc::VideoFrameBuffer::Type::kI420) {
auto p = vfb->GetI420();
v.i420_stride_y = p->StrideY();
v.i420_stride_u = p->StrideU();
v.i420_stride_v = p->StrideV();
v.i420_data_y = reinterpret_cast<int64_t>(p->DataY());
v.i420_data_u = reinterpret_cast<int64_t>(p->DataU());
v.i420_data_v = reinterpret_cast<int64_t>(p->DataV());
}
if (vfb->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
auto p = vfb->GetNV12();
v.nv12_stride_y = p->StrideY();
v.nv12_stride_uv = p->StrideUV();
v.nv12_data_y = reinterpret_cast<int64_t>(p->DataY());
v.nv12_data_uv = reinterpret_cast<int64_t>(p->DataUV());
}
on_frame(jsonif::to_json(f));
};
}

auto capturer = CreateVideoCapturer(
cc.capturer_type, (void*)cc.unity_camera_texture,
cc.video_capturer_device, cc.video_width, cc.video_height,
signaling_thread_.get(), env, android_context
cc.video_capturer_device, cc.video_width, cc.video_height, cc.video_fps,
on_frame, signaling_thread_.get(), env, android_context
#ifdef SORA_UNITY_SDK_HOLOLENS2
, cc
,
cc
#endif
);
);
if (!capturer) {
on_disconnect((int)sora_conf::ErrorCode::INTERNAL_ERROR,
"Capturer Init Failed");
@@ -574,6 +617,8 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> Sora::CreateVideoCapturer(
std::string video_capturer_device,
int video_width,
int video_height,
int video_fps,
std::function<void(const webrtc::VideoFrame& frame)> on_frame,
rtc::Thread* signaling_thread,
void* jni_env,
void* android_context
@@ -587,8 +632,8 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> Sora::CreateVideoCapturer(
sora::CameraDeviceCapturerConfig config;
config.width = video_width;
config.height = video_height;
// TODO(melpon): set the framerate properly
config.fps = 30;
config.fps = video_fps;
config.on_frame = on_frame;
config.device_name = video_capturer_device;
config.jni_env = jni_env;
config.application_context = android_context;
@@ -618,9 +663,12 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> Sora::CreateVideoCapturer(
return sora::CreateCameraDeviceCapturer(config);
} else {
// Use the video from the Unity camera
return UnityCameraCapturer::Create(&UnityContext::Instance(),
unity_camera_texture, video_width,
video_height);
UnityCameraCapturerConfig config;
config.context = &UnityContext::Instance();
config.unity_camera_texture = unity_camera_texture;
config.width = video_width;
config.height = video_height;
return UnityCameraCapturer::Create(config);
}
}

@@ -783,4 +831,5 @@ void Sora::PushEvent(std::function<void()> f) {
std::lock_guard<std::mutex> guard(event_mutex_);
event_queue_.push_back(std::move(f));
}

} // namespace sora_unity_sdk
37 changes: 27 additions & 10 deletions src/sora.h
@@ -47,6 +47,7 @@ class Sora : public std::enable_shared_from_this<Sora>,
void SetOnMessage(std::function<void(std::string, std::string)> on_message);
void SetOnDisconnect(std::function<void(int, std::string)> on_disconnect);
void SetOnDataChannel(std::function<void(std::string)> on_data_channel);
void SetOnCapturerFrame(std::function<void(std::string)> on_capturer_frame);
void DispatchEvents();

void Connect(const sora_conf::internal::ConnectConfig& cc);
@@ -101,22 +102,36 @@ class Sora : public std::enable_shared_from_this<Sora>,
std::string audio_playout_device);

static rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
CreateVideoCapturer(int capturer_type,
void* unity_camera_texture,
std::string video_capturer_device,
int video_width,
int video_height,
rtc::Thread* signaling_thread,
void* jni_env,
void* android_context
CreateVideoCapturer(
int capturer_type,
void* unity_camera_texture,
std::string video_capturer_device,
int video_width,
int video_height,
int video_fps,
std::function<void(const webrtc::VideoFrame& frame)> on_frame,
rtc::Thread* signaling_thread,
void* jni_env,
void* android_context
#ifdef SORA_UNITY_SDK_HOLOLENS2
,
const sora_conf::internal::ConnectConfig& cc
,
const sora_conf::internal::ConnectConfig& cc
#endif
);

void PushEvent(std::function<void()> f);

struct CapturerSink : rtc::VideoSinkInterface<webrtc::VideoFrame> {
CapturerSink(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> capturer,
std::function<void(std::string)> on_frame);
~CapturerSink() override;
void OnFrame(const webrtc::VideoFrame& frame) override;

private:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> capturer_;
std::function<void(std::string)> on_frame_;
};

private:
std::unique_ptr<boost::asio::io_context> ioc_;
std::shared_ptr<sora::SoraSignaling> signaling_;
@@ -132,6 +147,7 @@ class Sora : public std::enable_shared_from_this<Sora>,
std::function<void(int, std::string)> on_disconnect_;
std::function<void(std::string)> on_data_channel_;
std::function<void(const int16_t*, int, int)> on_handle_audio_;
std::function<void(std::string)> on_capturer_frame_;

std::unique_ptr<rtc::Thread> io_thread_;
std::unique_ptr<rtc::Thread> network_thread_;
@@ -149,6 +165,7 @@ class Sora : public std::enable_shared_from_this<Sora>,

rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> capturer_;
int capturer_type_ = 0;
std::shared_ptr<CapturerSink> capturer_sink_;

rtc::scoped_refptr<UnityAudioDevice> unity_adm_;
webrtc::TaskQueueFactory* task_queue_factory_;