Make alvr_client_core compilable for platforms != android

This commit is contained in:
Riccardo Zaglia 2022-09-18 00:47:44 +02:00
parent a263fa6563
commit 871c6d348a
12 changed files with 431 additions and 441 deletions

43
Cargo.lock generated
View File

@ -250,7 +250,7 @@ dependencies = [
"tokio-tungstenite",
"tokio-util",
"walkdir",
"webbrowser",
"webbrowser 0.8.0",
"winit",
]
@ -1246,7 +1246,7 @@ dependencies = [
"instant",
"smithay-clipboard",
"tracing",
"webbrowser",
"webbrowser 0.7.1",
"winit",
]
@ -2607,6 +2607,21 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b"
[[package]]
name = "ndk-glue"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d0c4a7b83860226e6b4183edac21851f05d5a51756e97a1144b7f5a6b63e65f"
dependencies = [
"lazy_static",
"libc",
"log",
"ndk 0.6.0",
"ndk-context",
"ndk-macro",
"ndk-sys 0.3.0",
]
[[package]]
name = "ndk-glue"
version = "0.7.0"
@ -4453,6 +4468,20 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webbrowser"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc6a3cffdb686fbb24d9fb8f03a213803277ed2300f11026a3afe1f108dc021b"
dependencies = [
"jni",
"ndk-glue 0.6.2",
"url",
"web-sys",
"widestring 0.5.1",
"winapi",
]
[[package]]
name = "webbrowser"
version = "0.8.0"
@ -4460,7 +4489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01d62aa75495ab67cdc273d0b95cc76bcedfea2ba28338a4cf9b4137949dfac5"
dependencies = [
"jni",
"ndk-glue",
"ndk-glue 0.7.0",
"objc",
"raw-window-handle 0.5.0",
"url",
@ -4609,6 +4638,12 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c"
[[package]]
name = "widestring"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
[[package]]
name = "widestring"
version = "1.0.2"
@ -4814,7 +4849,7 @@ dependencies = [
"log",
"mio",
"ndk 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ndk-glue",
"ndk-glue 0.7.0",
"objc",
"once_cell",
"parking_lot 0.12.1",

View File

@ -27,7 +27,7 @@ serde_json = "1"
settings-schema = { version = "0.0.1", features = ["rename_camel_case"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "time"] }
# Android
[target.'cfg(target_os = "android")'.dependencies]
android_logger = "0.11"
jni = "0.19"
ndk = { git = "https://github.com/zarik5/android-ndk-rs", rev = "f1d38a0", features = [

View File

@ -10,29 +10,43 @@ fn main() {
.map(|entry| entry.into_path())
.collect::<Vec<_>>();
let source_files_paths = if platform_name == "android" {
cpp_paths
.iter()
.filter_map(|path| {
path.extension()
.filter(|ext| ext.to_string_lossy() == "cpp")
.is_some()
.then(|| path.clone())
})
.collect()
} else {
vec![
PathBuf::new().join("cpp/fec.cpp"),
PathBuf::new().join("cpp/nal.cpp"),
]
};
let mut builder = &mut cc::Build::new();
builder = builder
.cpp(true)
.flag("-std=c++17")
.flag("-fexceptions")
.flag("-frtti")
.files(source_files_paths)
.include("cpp")
.include("cpp/gl_render_utils");
if platform_name == "android" {
let source_files_paths = cpp_paths.iter().filter(|path| {
path.extension()
.filter(|ext| ext.to_string_lossy() == "cpp")
.is_some()
});
builder = builder.cpp_link_stdlib("c++_static");
}
builder.compile("bindings");
cc::Build::new()
.cpp(true)
.flag("-std=c++17")
.flag("-fexceptions")
.flag("-frtti")
.files(source_files_paths)
.include("cpp")
.include("cpp/gl_render_utils")
.cpp_link_stdlib("c++_static")
.compile("bindings");
cc::Build::new()
.cpp(false)
.files(&["cpp/reedsolomon/rs.c"])
.compile("bindings_rs_c");
cc::Build::new()
.cpp(false)
.files(&["cpp/reedsolomon/rs.c"])
.compile("bindings_rs_c");
if platform_name == "android" {
println!("cargo:rustc-link-lib=log");
println!("cargo:rustc-link-lib=EGL");
println!("cargo:rustc-link-lib=GLESv3");

View File

@ -1,27 +1,29 @@
#include "fec.h"
#include "utils.h"
#include <assert.h>
#include <cstring>
static const int ALVR_MAX_VIDEO_BUFFER_SIZE = 1400;
static const int ALVR_FEC_SHARDS_MAX = 20;
inline int CalculateParityShards(int dataShards, int fecPercentage) {
int totalParityShards = (dataShards * fecPercentage + 99) / 100;
return totalParityShards;
int totalParityShards = (dataShards * fecPercentage + 99) / 100;
return totalParityShards;
}
// Calculate how many packets are needed to make a single shard.
inline int CalculateFECShardPackets(int len, int fecPercentage) {
// This reed solomon implementation accepts only 255 shards.
// Normally, we use ALVR_MAX_VIDEO_BUFFER_SIZE as block_size and single packet becomes single shard.
// If we need more than maxDataShards packets, we need to combine multiple packets to make a single shard.
// NOTE: Moonlight seems to use only 255 shards for video frame.
int maxDataShards = ((ALVR_FEC_SHARDS_MAX - 2) * 100 + 99 + fecPercentage) / (100 + fecPercentage);
int minBlockSize = (len + maxDataShards - 1) / maxDataShards;
int shardPackets = (minBlockSize + ALVR_MAX_VIDEO_BUFFER_SIZE - 1) / ALVR_MAX_VIDEO_BUFFER_SIZE;
assert(maxDataShards + CalculateParityShards(maxDataShards, fecPercentage) <= ALVR_FEC_SHARDS_MAX);
return shardPackets;
// This reed solomon implementation accepts only 255 shards.
// Normally, we use ALVR_MAX_VIDEO_BUFFER_SIZE as block_size and single packet becomes single
// shard. If we need more than maxDataShards packets, we need to combine multiple packets to
// make a single shard. NOTE: Moonlight seems to use only 255 shards for video frame.
int maxDataShards =
((ALVR_FEC_SHARDS_MAX - 2) * 100 + 99 + fecPercentage) / (100 + fecPercentage);
int minBlockSize = (len + maxDataShards - 1) / maxDataShards;
int shardPackets = (minBlockSize + ALVR_MAX_VIDEO_BUFFER_SIZE - 1) / ALVR_MAX_VIDEO_BUFFER_SIZE;
assert(maxDataShards + CalculateParityShards(maxDataShards, fecPercentage) <=
ALVR_FEC_SHARDS_MAX);
return shardPackets;
}
bool FECQueue::reed_solomon_initialized = false;
@ -44,26 +46,16 @@ FECQueue::~FECQueue() {
}
// Add packet to queue. packet must point to buffer whose size=ALVR_MAX_PACKET_SIZE.
void FECQueue::addVideoPacket(VideoFrame header, const unsigned char *payload, int payloadSize, bool &fecFailure) {
void FECQueue::addVideoPacket(VideoFrame header,
const unsigned char *payload,
int payloadSize,
bool &fecFailure) {
if (m_recovered && m_currentFrame.videoFrameIndex == header.videoFrameIndex) {
return;
}
if (m_currentFrame.videoFrameIndex != header.videoFrameIndex) {
// New frame
if (!m_recovered) {
FrameLog(m_currentFrame.trackingFrameIndex,
"Previous frame cannot be recovered. videoFrame=%llu shards=%u:%u frameByteSize=%d"
" fecPercentage=%d m_totalShards=%u m_shardPackets=%u m_blockSize=%u",
m_currentFrame.videoFrameIndex,
m_totalDataShards,
m_totalParityShards,
m_currentFrame.frameByteSize, m_currentFrame.fecPercentage, m_totalShards,
m_shardPackets, m_blockSize);
for (size_t packet = 0; packet < m_shardPackets; packet++) {
FrameLog(m_currentFrame.trackingFrameIndex,
"packetIndex=%d, shards=%u:%u",
packet, m_receivedDataShards[packet], m_receivedParityShards[packet]);
}
fecFailure = m_fecFailure = true;
}
m_currentFrame = header;
@ -72,15 +64,15 @@ void FECQueue::addVideoPacket(VideoFrame header, const unsigned char *payload, i
reed_solomon_release(m_rs);
}
uint32_t fecDataPackets = (header.frameByteSize + ALVR_MAX_VIDEO_BUFFER_SIZE - 1) /
ALVR_MAX_VIDEO_BUFFER_SIZE;
m_shardPackets = CalculateFECShardPackets(m_currentFrame.frameByteSize,
m_currentFrame.fecPercentage);
uint32_t fecDataPackets =
(header.frameByteSize + ALVR_MAX_VIDEO_BUFFER_SIZE - 1) / ALVR_MAX_VIDEO_BUFFER_SIZE;
m_shardPackets =
CalculateFECShardPackets(m_currentFrame.frameByteSize, m_currentFrame.fecPercentage);
m_blockSize = m_shardPackets * ALVR_MAX_VIDEO_BUFFER_SIZE;
m_totalDataShards = (m_currentFrame.frameByteSize + m_blockSize - 1) / m_blockSize;
m_totalParityShards = CalculateParityShards(m_totalDataShards,
m_currentFrame.fecPercentage);
m_totalParityShards =
CalculateParityShards(m_totalDataShards, m_currentFrame.fecPercentage);
m_totalShards = m_totalDataShards + m_totalParityShards;
m_recoveredPacket.clear();
@ -120,45 +112,29 @@ void FECQueue::addVideoPacket(VideoFrame header, const unsigned char *payload, i
// Calculate last packet counter of current frame to detect whole frame packet loss.
uint32_t startPacket;
uint32_t nextStartPacket;
if(m_currentFrame.fecIndex / m_shardPackets < m_totalDataShards) {
if (m_currentFrame.fecIndex / m_shardPackets < m_totalDataShards) {
// First seen packet was data packet
startPacket = m_currentFrame.packetCounter - m_currentFrame.fecIndex;
nextStartPacket = m_currentFrame.packetCounter - m_currentFrame.fecIndex + m_totalShards * m_shardPackets - padding;
}else{
nextStartPacket = m_currentFrame.packetCounter - m_currentFrame.fecIndex +
m_totalShards * m_shardPackets - padding;
} else {
// was parity packet
startPacket = m_currentFrame.packetCounter - (m_currentFrame.fecIndex - padding);
uint64_t m_startOfParityPacket = m_currentFrame.packetCounter - (m_currentFrame.fecIndex - m_totalDataShards * m_shardPackets);
uint64_t m_startOfParityPacket =
m_currentFrame.packetCounter -
(m_currentFrame.fecIndex - m_totalDataShards * m_shardPackets);
nextStartPacket = m_startOfParityPacket + m_totalParityShards * m_shardPackets;
}
if(m_firstPacketOfNextFrame != 0 && m_firstPacketOfNextFrame != startPacket) {
if (m_firstPacketOfNextFrame != 0 && m_firstPacketOfNextFrame != startPacket) {
// Whole frame packet loss
FrameLog(m_currentFrame.trackingFrameIndex,
"Previous frame was completely lost. videoFrame=%llu shards=%u:%u frameByteSize=%d fecPercentage=%d m_totalShards=%u "
"m_shardPackets=%u m_blockSize=%u m_firstPacketOfNextFrame=%u startPacket=%u currentPacket=%u",
m_currentFrame.videoFrameIndex, m_totalDataShards, m_totalParityShards,
m_currentFrame.frameByteSize, m_currentFrame.fecPercentage, m_totalShards,
m_shardPackets, m_blockSize, m_firstPacketOfNextFrame, startPacket, m_currentFrame.packetCounter);
for (size_t packet = 0; packet < m_shardPackets; packet++) {
FrameLog(m_currentFrame.trackingFrameIndex,
"packetIndex=%d, shards=%u:%u",
packet, m_receivedDataShards[packet], m_receivedParityShards[packet]);
}
fecFailure = m_fecFailure = true;
}
m_firstPacketOfNextFrame = nextStartPacket;
FrameLog(m_currentFrame.trackingFrameIndex,
"Start new frame. videoFrame=%llu frameByteSize=%d fecPercentage=%d m_totalDataShards=%u m_totalParityShards=%u"
" m_totalShards=%u m_shardPackets=%u m_blockSize=%u",
m_currentFrame.videoFrameIndex, m_currentFrame.frameByteSize, m_currentFrame.fecPercentage, m_totalDataShards,
m_totalParityShards, m_totalShards, m_shardPackets, m_blockSize);
}
size_t shardIndex = header.fecIndex / m_shardPackets;
size_t packetIndex = header.fecIndex % m_shardPackets;
if (m_marks[packetIndex][shardIndex] == 0) {
// Duplicate packet.
LOGI("Packet duplication. packetCounter=%d fecIndex=%d", header.packetCounter,
header.fecIndex);
return;
}
m_marks[packetIndex][shardIndex] = 0;
@ -190,59 +166,44 @@ bool FECQueue::reconstruct() {
}
if (m_receivedDataShards[packet] == m_totalDataShards) {
// We've received a full packet with no need for FEC.
//FrameLog(m_currentFrame.frameIndex, "No need for FEC. packetIndex=%d", packet);
m_recoveredPacket[packet] = true;
continue;
}
m_rs->shards = m_receivedDataShards[packet] +
m_receivedParityShards[packet]; //Don't let RS complain about missing parity packets
m_rs->shards =
m_receivedDataShards[packet] +
m_receivedParityShards[packet]; // Don't let RS complain about missing parity packets
if (m_rs->shards < (int) m_totalDataShards) {
if (m_rs->shards < (int)m_totalDataShards) {
// Not enough parity data
ret = false;
continue;
}
FrameLog(m_currentFrame.trackingFrameIndex,
"Recovering. packetIndex=%d receivedDataShards=%d/%d receivedParityShards=%d/%d",
packet, m_receivedDataShards[packet], m_totalDataShards,
m_receivedParityShards[packet], m_totalParityShards);
for (size_t i = 0; i < m_totalShards; i++) {
m_shards[i] = &m_frameBuffer[(i * m_shardPackets + packet) * ALVR_MAX_VIDEO_BUFFER_SIZE];
m_shards[i] =
&m_frameBuffer[(i * m_shardPackets + packet) * ALVR_MAX_VIDEO_BUFFER_SIZE];
}
int result = reed_solomon_reconstruct(m_rs, (unsigned char **) &m_shards[0],
int result = reed_solomon_reconstruct(m_rs,
(unsigned char **)&m_shards[0],
&m_marks[packet][0],
m_totalShards, ALVR_MAX_VIDEO_BUFFER_SIZE);
m_totalShards,
ALVR_MAX_VIDEO_BUFFER_SIZE);
m_recoveredPacket[packet] = true;
// We should always provide enough parity to recover the missing data successfully.
// If this fails, something is probably wrong with our FEC state.
if (result != 0) {
LOGE("reed_solomon_reconstruct failed.");
return false;
}
/*
for(int i = 0; i < m_totalShards * m_shardPackets; i++) {
char *p = &frameBuffer[ALVR_MAX_VIDEO_BUFFER_SIZE * i];
LOGI("Reconstructed packets. i=%d shardIndex=%d buffer=[%02X %02X %02X %02X %02X ...]", i, i / m_shardPackets, p[0], p[1], p[2], p[3], p[4]);
}*/
}
if (ret) {
m_recovered = true;
FrameLog(m_currentFrame.trackingFrameIndex, "Frame was successfully recovered by FEC.");
}
return ret;
}
const std::byte *FECQueue::getFrameBuffer() {
return &m_frameBuffer[0];
}
const std::byte *FECQueue::getFrameBuffer() { return &m_frameBuffer[0]; }
int FECQueue::getFrameByteSize() {
return m_currentFrame.frameByteSize;
}
int FECQueue::getFrameByteSize() { return m_currentFrame.frameByteSize; }
void FECQueue::clearFecFailure() {
m_fecFailure = false;
}
void FECQueue::clearFecFailure() { m_fecFailure = false; }

View File

@ -16,6 +16,15 @@ const int MAX_PROGRAM_TEXTURES = 8;
const int HUD_TEXTURE_WIDTH = 1280;
const int HUD_TEXTURE_HEIGHT = 720;
/// Integer version of ovrRectf
typedef struct Recti_
{
int x;
int y;
int width;
int height;
} Recti;
typedef struct {
std::vector<std::unique_ptr<gl_render_utils::Texture>> renderTargets;
std::vector<std::unique_ptr<gl_render_utils::RenderState>> renderStates;

View File

@ -4,14 +4,13 @@
#include "bindings.h"
#include "fec.h"
#include "utils.h"
static const std::byte NAL_TYPE_SPS = static_cast<const std::byte>(7);
static const std::byte H265_NAL_TYPE_VPS = static_cast<const std::byte>(32);
enum ALVR_CODEC {
ALVR_CODEC_H264 = 0,
ALVR_CODEC_H265 = 1,
ALVR_CODEC_H264 = 0,
ALVR_CODEC_H265 = 1,
};
void (*createDecoder)(const char *csd_0, int length);
@ -54,7 +53,10 @@ int findVPSSPS(const std::byte *frameBuffer, int frameByteSize) {
return -1;
}
bool processNalPacket(VideoFrame header, const unsigned char *payload, int payloadSize, bool &outHadFecFailure) {
bool processNalPacket(VideoFrame header,
const unsigned char *payload,
int payloadSize,
bool &outHadFecFailure) {
if (m_enableFEC) {
m_queue.addVideoPacket(header, payload, payloadSize, outHadFecFailure);
}
@ -86,10 +88,8 @@ bool processNalPacket(VideoFrame header, const unsigned char *payload, int paylo
int end = findVPSSPS(frameBuffer, frameByteSize);
if (end == -1) {
// Invalid frame.
LOG("Got invalid frame. Too large SPS or PPS?");
return false;
}
LOGI("Got frame=%d %d, Codec=%d", (std::int32_t)NALType, end, m_codec);
createDecoder((const char *)&frameBuffer[0], end);
pushNal(
(const char *)&frameBuffer[end], frameByteSize - end, header.trackingFrameIndex);

View File

@ -1,8 +0,0 @@
#include "utils.h"
#include <jni.h>
#include "bindings.h"
int gGeneralLogLevel = ANDROID_LOG_INFO;
int gSoundLogLevel = ANDROID_LOG_INFO;
int gSocketLogLevel = ANDROID_LOG_INFO;
long gDebugFlags = 0;

View File

@ -1,81 +1,39 @@
#ifndef ALVRCLIENT_UTILS_H
#define ALVRCLIENT_UTILS_H
#include <stdint.h>
#include <math.h>
#include <time.h>
#include <pthread.h>
#include <GLES3/gl3.h>
#include <android/log.h>
#include <string>
#include <VrApi_Types.h>
#include <GLES3/gl3.h>
//
// Logging
//
// Defined in utils.cpp. 0 means no log output.
extern int gGeneralLogLevel;
extern int gSoundLogLevel;
extern int gSocketLogLevel;
#define LOG(...) do { if(gGeneralLogLevel <= ANDROID_LOG_VERBOSE){__android_log_print(ANDROID_LOG_VERBOSE, "ALVR Native", __VA_ARGS__);} } while (false)
#define LOGI(...) do { if(gGeneralLogLevel <= ANDROID_LOG_INFO){__android_log_print(ANDROID_LOG_INFO, "ALVR Native", __VA_ARGS__);} } while (false)
#define LOGE(...) do { if(gGeneralLogLevel <= ANDROID_LOG_ERROR){__android_log_print(ANDROID_LOG_ERROR, "ALVR Native", __VA_ARGS__);} } while (false)
#define LOGSOUND(...) do { if(gSoundLogLevel <= ANDROID_LOG_VERBOSE){__android_log_print(ANDROID_LOG_VERBOSE, "ALVR Sound", __VA_ARGS__);} } while (false)
#define LOGSOUNDI(...) do { if(gSoundLogLevel <= ANDROID_LOG_INFO){__android_log_print(ANDROID_LOG_INFO, "ALVR Sound", __VA_ARGS__);} } while (false)
#define LOGSOCKET(...) do { if(gSocketLogLevel <= ANDROID_LOG_VERBOSE){__android_log_print(ANDROID_LOG_VERBOSE, "ALVR Socket", __VA_ARGS__);} } while (false)
#define LOGSOCKETI(...) do { if(gSocketLogLevel <= ANDROID_LOG_INFO){__android_log_print(ANDROID_LOG_INFO, "ALVR Socket", __VA_ARGS__);} } while (false)
static const int64_t USECS_IN_SEC = 1000 * 1000;
const bool gEnableFrameLog = false;
inline void FrameLog(uint64_t frameIndex, const char *format, ...)
{
if (!gEnableFrameLog) {
return;
}
char buf[10000];
va_list args;
va_start(args, format);
vsnprintf(buf, sizeof(buf), format, args);
va_end(args);
__android_log_print(ANDROID_LOG_VERBOSE, "FrameTracking", "[Frame %lu] %s", frameIndex, buf);
}
//
// GL Logging
//
#define CHECK_GL_ERRORS 1
#ifdef CHECK_GL_ERRORS
#define LOGI(...) \
do { \
__android_log_print(ANDROID_LOG_INFO, "[ALVR Native]", __VA_ARGS__); \
} while (false)
#define LOGE(...) \
do { \
__android_log_print(ANDROID_LOG_ERROR, "[ALVR Native]", __VA_ARGS__); \
} while (false)
static const char *GlErrorString(GLenum error) {
switch (error) {
case GL_NO_ERROR:
return "GL_NO_ERROR";
case GL_INVALID_ENUM:
return "GL_INVALID_ENUM";
case GL_INVALID_VALUE:
return "GL_INVALID_VALUE";
case GL_INVALID_OPERATION:
return "GL_INVALID_OPERATION";
case GL_INVALID_FRAMEBUFFER_OPERATION:
return "GL_INVALID_FRAMEBUFFER_OPERATION";
case GL_OUT_OF_MEMORY:
return "GL_OUT_OF_MEMORY";
default:
return "unknown";
case GL_NO_ERROR:
return "GL_NO_ERROR";
case GL_INVALID_ENUM:
return "GL_INVALID_ENUM";
case GL_INVALID_VALUE:
return "GL_INVALID_VALUE";
case GL_INVALID_OPERATION:
return "GL_INVALID_OPERATION";
case GL_INVALID_FRAMEBUFFER_OPERATION:
return "GL_INVALID_FRAMEBUFFER_OPERATION";
case GL_OUT_OF_MEMORY:
return "GL_OUT_OF_MEMORY";
default:
return "unknown";
}
}
[[maybe_unused]] static void GLCheckErrors(const char* file, int line) {
[[maybe_unused]] static void GLCheckErrors(const char *file, int line) {
const GLenum error = glGetError();
if (error == GL_NO_ERROR) {
return;
@ -84,58 +42,16 @@ static const char *GlErrorString(GLenum error) {
abort();
}
#define GL(func) func; GLCheckErrors(__FILE__, __LINE__ )
#else // CHECK_GL_ERRORS
#define GL(func) func;
#endif // CHECK_GL_ERRORS
//
// Utility
//
inline uint64_t getTimestampUs(){
timeval tv;
gettimeofday(&tv, NULL);
uint64_t Current = (uint64_t)tv.tv_sec * 1000 * 1000 + tv.tv_usec;
return Current;
}
//
// Utility
//
/// Integer version of ovrRectf
typedef struct Recti_
{
int x;
int y;
int width;
int height;
} Recti;
inline std::string GetStringFromJNIString(JNIEnv *env, jstring string){
const char *buf = env->GetStringUTFChars(string, 0);
std::string ret = buf;
env->ReleaseStringUTFChars(string, buf);
return ret;
}
inline double GetTimeInSeconds() {
struct timespec now;
clock_gettime(CLOCK_MONOTONIC, &now);
return (now.tv_sec * 1e9 + now.tv_nsec) * 0.000000001;
}
#define GL(func) \
func; \
GLCheckErrors(__FILE__, __LINE__)
// https://stackoverflow.com/a/26221725
template<typename ... Args>
std::string string_format( const std::string& format, Args ... args )
{
size_t size = snprintf( nullptr, 0, format.c_str(), args ... ) + 1; // Extra space for '\0'
std::unique_ptr<char[]> buf( new char[ size ] );
snprintf( buf.get(), size, format.c_str(), args ... );
return std::string( buf.get(), buf.get() + size - 1 ); // We don't want the '\0' inside
template <typename... Args> std::string string_format(const std::string &format, Args... args) {
size_t size = snprintf(nullptr, 0, format.c_str(), args...) + 1; // Extra space for '\0'
std::unique_ptr<char[]> buf(new char[size]);
snprintf(buf.get(), size, format.c_str(), args...);
return std::string(buf.get(), buf.get() + size - 1); // We don't want the '\0' inside
}
#endif //ALVRCLIENT_UTILS_H
#endif // ALVRCLIENT_UTILS_H

View File

@ -2,12 +2,12 @@
use crate::{
connection_utils::{self, ConnectionError},
decoder::DECODER_INIT_CONFIG,
platform,
statistics::StatisticsManager,
storage::Config,
AlvrEvent, VideoFrame, CONTROL_CHANNEL_SENDER, DECODER_DEQUEUER, DECODER_ENQUEUER,
DECODER_INIT_CONFIG, DISCONNECT_NOTIFIER, EVENT_QUEUE, IS_RESUMED, IS_STREAMING,
STATISTICS_MANAGER, STATISTICS_SENDER, TRACKING_SENDER, USE_OPENGL,
AlvrEvent, VideoFrame, CONTROL_CHANNEL_SENDER, DISCONNECT_NOTIFIER, EVENT_QUEUE, IS_RESUMED,
IS_STREAMING, STATISTICS_MANAGER, STATISTICS_SENDER, TRACKING_SENDER, USE_OPENGL,
};
use alvr_audio::{AudioDevice, AudioDeviceType};
use alvr_common::{prelude::*, ALVR_NAME, ALVR_VERSION};
@ -33,6 +33,9 @@ use tokio::{
time,
};
#[cfg(target_os = "android")]
use crate::decoder::{DECODER_DEQUEUER, DECODER_ENQUEUER};
#[cfg(target_os = "android")]
use crate::audio;
#[cfg(not(target_os = "android"))]
@ -105,6 +108,7 @@ fn set_loading_message(message: &str) {
}
}
#[cfg(target_os = "android")]
if USE_OPENGL.value() {
unsafe { crate::updateLobbyHudTexture(buffer.as_ptr()) };
}
@ -275,6 +279,7 @@ async fn connection_pipeline(
];
}
#[cfg(target_os = "android")]
unsafe {
crate::setStreamConfig(crate::StreamConfigInput {
viewWidth: config_packet.view_resolution_width,
@ -430,8 +435,11 @@ async fn connection_pipeline(
IS_STREAMING.set(false);
*DECODER_ENQUEUER.lock() = None;
*DECODER_DEQUEUER.lock() = None;
#[cfg(target_os = "android")]
{
*DECODER_ENQUEUER.lock() = None;
*DECODER_DEQUEUER.lock() = None;
}
}
}

View File

@ -0,0 +1,208 @@
use crate::{
platform, AlvrCodec, AlvrEvent, CONTROL_CHANNEL_SENDER, DISCONNECT_NOTIFIER, EVENT_QUEUE,
STATISTICS_MANAGER,
};
use alvr_common::{once_cell::sync::Lazy, parking_lot::Mutex, prelude::*, RelaxedAtomic};
use alvr_session::{CodecType, MediacodecDataType};
use alvr_sockets::ClientControlPacket;
use std::{
collections::VecDeque,
ffi::c_void,
os::raw::c_char,
ptr, thread,
time::{Duration, Instant},
};
#[cfg(target_os = "android")]
use crate::platform::{DecoderDequeuedData, VideoDecoderDequeuer, VideoDecoderEnqueuer};
pub struct DecoderInitConfig {
pub codec: CodecType,
pub options: Vec<(String, MediacodecDataType)>,
}
pub static DECODER_INIT_CONFIG: Lazy<Mutex<DecoderInitConfig>> = Lazy::new(|| {
Mutex::new(DecoderInitConfig {
codec: CodecType::H264,
options: vec![],
})
});
#[cfg(target_os = "android")]
pub static DECODER_ENQUEUER: Lazy<Mutex<Option<VideoDecoderEnqueuer>>> =
Lazy::new(|| Mutex::new(None));
#[cfg(target_os = "android")]
pub static DECODER_DEQUEUER: Lazy<Mutex<Option<VideoDecoderDequeuer>>> =
Lazy::new(|| Mutex::new(None));
pub static EXTERNAL_DECODER: RelaxedAtomic = RelaxedAtomic::new(false);
static NAL_QUEUE: Lazy<Mutex<VecDeque<(Duration, Vec<u8>)>>> =
Lazy::new(|| Mutex::new(VecDeque::new()));
static LAST_ENQUEUED_TIMESTAMPS: Lazy<Mutex<VecDeque<Duration>>> =
Lazy::new(|| Mutex::new(VecDeque::new()));
pub extern "C" fn create_decoder(buffer: *const c_char, length: i32) {
let mut csd_0 = vec![0; length as _];
unsafe { ptr::copy_nonoverlapping(buffer, csd_0.as_mut_ptr() as _, length as _) };
let config = DECODER_INIT_CONFIG.lock();
if EXTERNAL_DECODER.value() {
// duration == 0 is the flag to identify the config NALS
NAL_QUEUE.lock().push_back((Duration::ZERO, csd_0));
EVENT_QUEUE.lock().push_back(AlvrEvent::CreateDecoder {
codec: if matches!(config.codec, CodecType::H264) {
AlvrCodec::H264
} else {
AlvrCodec::H265
},
});
} else {
#[cfg(target_os = "android")]
if DECODER_ENQUEUER.lock().is_none() {
let (enqueuer, dequeuer) =
platform::video_decoder_split(config.codec, &csd_0, &config.options).unwrap();
*DECODER_ENQUEUER.lock() = Some(enqueuer);
*DECODER_DEQUEUER.lock() = Some(dequeuer);
if let Some(sender) = &*CONTROL_CHANNEL_SENDER.lock() {
sender.send(ClientControlPacket::RequestIdr).ok();
}
}
}
}
pub extern "C" fn push_nal(buffer: *const c_char, length: i32, timestamp_ns: u64) {
let timestamp = Duration::from_nanos(timestamp_ns);
{
let mut timestamps_lock = LAST_ENQUEUED_TIMESTAMPS.lock();
timestamps_lock.push_back(timestamp);
if timestamps_lock.len() > 20 {
timestamps_lock.pop_front();
}
}
let mut nal_buffer = vec![0; length as _];
unsafe { ptr::copy_nonoverlapping(buffer, nal_buffer.as_mut_ptr() as _, length as _) }
if EXTERNAL_DECODER.value() {
NAL_QUEUE.lock().push_back((timestamp, nal_buffer));
EVENT_QUEUE.lock().push_back(AlvrEvent::NalReady);
} else {
#[cfg(target_os = "android")]
if let Some(decoder) = &*DECODER_ENQUEUER.lock() {
show_err(decoder.push_frame_nal(timestamp, &nal_buffer, Duration::from_millis(500)));
} else if let Some(sender) = &*CONTROL_CHANNEL_SENDER.lock() {
sender.send(ClientControlPacket::RequestIdr).ok();
}
}
}
/// Call only with internal decoder
/// Returns frame timestamp in nanoseconds or -1 if no frame available. Returns an AHardwareBuffer
/// from out_buffer.
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern "C" fn alvr_wait_for_frame(out_buffer: *mut *mut c_void) -> i64 {
let timestamp = if let Some(decoder) = &*DECODER_DEQUEUER.lock() {
// Note on frame pacing: sometimes there could be late frames stored inside the decoder,
// which are gradually drained by polling two frames per frame. But sometimes a frame could
// be received earlier than usual because of network jitter. In this case, if we polled the
// second frame immediately, the next frame would probably be late. To mitigate this
// scenario, a 5ms delay measurement is used to decide if to poll the second frame or not.
// todo: remove the 5ms "magic number" and implement proper phase sync measuring network
// jitter variance.
let start_instant = Instant::now();
match decoder.dequeue_frame(Duration::from_millis(50), Duration::from_millis(100)) {
Ok(DecoderDequeuedData::Frame {
buffer_ptr,
timestamp,
}) => {
if Instant::now() - start_instant < Duration::from_millis(5) {
debug!("Try draining extra decoder frame");
match decoder
.dequeue_frame(Duration::from_micros(1), Duration::from_millis(100))
{
Ok(DecoderDequeuedData::Frame {
buffer_ptr,
timestamp,
}) => {
*out_buffer = buffer_ptr;
Some(timestamp)
}
Ok(_) => {
// Note: data from first dequeue!
*out_buffer = buffer_ptr;
Some(timestamp)
}
Err(e) => {
error!("Error while decoder dequeue (2nd time): {e}");
DISCONNECT_NOTIFIER.notify_waiters();
None
}
}
} else {
*out_buffer = buffer_ptr;
Some(timestamp)
}
}
Ok(data) => {
info!("Decoder: no frame dequeued. {data:?}");
None
}
Err(e) => {
error!("Error while decoder dequeue: {e}");
DISCONNECT_NOTIFIER.notify_waiters();
None
}
}
} else {
thread::sleep(Duration::from_millis(5));
None
};
if let Some(timestamp) = timestamp {
if let Some(stats) = &mut *STATISTICS_MANAGER.lock() {
stats.report_frame_decoded(timestamp);
}
if !LAST_ENQUEUED_TIMESTAMPS.lock().contains(&timestamp) {
error!("Detected late decoder, disconnecting...");
DISCONNECT_NOTIFIER.notify_waiters();
}
timestamp.as_nanos() as i64
} else {
-1
}
}
/// Call only with external decoder
/// Returns the number of bytes of the next nal, or 0 if there are no nals ready.
/// If out_nal or out_timestamp_ns is null, no nal is dequeued. Use to get the nal allocation size.
/// Returns out_timestamp_ns == 0 if config NAL.
#[no_mangle]
pub extern "C" fn alvr_poll_nal(out_nal: *mut c_char, out_timestamp_ns: *mut u64) -> u64 {
let mut queue_lock = NAL_QUEUE.lock();
if let Some((timestamp, nal)) = queue_lock.pop_front() {
let nal_size = nal.len();
if !out_nal.is_null() && !out_timestamp_ns.is_null() {
unsafe {
ptr::copy_nonoverlapping(nal.as_ptr(), out_nal as _, nal_size);
*out_timestamp_ns = timestamp.as_nanos() as _;
}
} else {
queue_lock.push_front((timestamp, nal))
}
nal_size as u64
} else {
0
}
}

View File

@ -2,6 +2,7 @@
mod connection;
mod connection_utils;
mod decoder;
mod logging_backend;
mod platform;
mod statistics;
@ -12,10 +13,7 @@ mod audio;
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
use crate::{
platform::DecoderDequeuedData,
storage::{LOBBY_ROOM_BIN, LOBBY_ROOM_GLTF},
};
use crate::storage::{LOBBY_ROOM_BIN, LOBBY_ROOM_GLTF};
use alvr_audio::{AudioDevice, AudioDeviceType};
use alvr_common::{
glam::{Quat, UVec2, Vec2, Vec3},
@ -25,18 +23,18 @@ use alvr_common::{
RelaxedAtomic, ALVR_VERSION,
};
use alvr_events::ButtonValue;
use alvr_session::{AudioDeviceId, CodecType, MediacodecDataType};
use alvr_session::AudioDeviceId;
use alvr_sockets::{
BatteryPacket, ClientControlPacket, ClientStatistics, DeviceMotion, Fov, HeadsetInfoPacket,
Tracking, ViewsConfig,
};
use platform::{VideoDecoderDequeuer, VideoDecoderEnqueuer};
use decoder::EXTERNAL_DECODER;
use statistics::StatisticsManager;
use std::{
collections::VecDeque,
ffi::{c_void, CStr},
os::raw::c_char,
ptr, slice, thread,
ptr, slice,
time::{Duration, Instant},
};
use storage::Config;
@ -58,32 +56,16 @@ static PREFERRED_RESOLUTION: Lazy<Mutex<UVec2>> = Lazy::new(|| Mutex::new(UVec2:
static EVENT_QUEUE: Lazy<Mutex<VecDeque<AlvrEvent>>> = Lazy::new(|| Mutex::new(VecDeque::new()));
static LAST_ENQUEUED_TIMESTAMPS: Lazy<Mutex<VecDeque<Duration>>> =
Lazy::new(|| Mutex::new(VecDeque::new()));
pub struct DecoderInitConfig {
codec: CodecType,
options: Vec<(String, MediacodecDataType)>,
}
pub static DECODER_INIT_CONFIG: Lazy<Mutex<DecoderInitConfig>> = Lazy::new(|| {
Mutex::new(DecoderInitConfig {
codec: CodecType::H264,
options: vec![],
})
});
static DECODER_ENQUEUER: Lazy<Mutex<Option<VideoDecoderEnqueuer>>> = Lazy::new(|| Mutex::new(None));
static DECODER_DEQUEUER: Lazy<Mutex<Option<VideoDecoderDequeuer>>> = Lazy::new(|| Mutex::new(None));
static EXTERNAL_DECODER: RelaxedAtomic = RelaxedAtomic::new(false);
static NAL_QUEUE: Lazy<Mutex<VecDeque<(Duration, Vec<u8>)>>> =
Lazy::new(|| Mutex::new(VecDeque::new()));
static IS_RESUMED: RelaxedAtomic = RelaxedAtomic::new(false);
static IS_STREAMING: RelaxedAtomic = RelaxedAtomic::new(false);
static USE_OPENGL: RelaxedAtomic = RelaxedAtomic::new(true);
pub enum AlvrCodec {
H264,
H265,
}
#[repr(u8)]
pub enum AlvrEvent {
StreamingStarted {
@ -102,6 +84,9 @@ pub enum AlvrEvent {
frequency: f32,
amplitude: f32,
},
CreateDecoder {
codec: AlvrCodec,
},
NalReady,
}
@ -182,7 +167,8 @@ pub extern "C" fn alvr_log_time(tag: *const c_char) {
error!("[ALVR NATIVE] {tag}: {:?}", Instant::now());
}
// NB: context must be thread safe.
/// On non-Android platforms, java_vm and context should be null.
/// NB: context must be thread safe.
#[no_mangle]
pub extern "C" fn alvr_initialize(
java_vm: *mut c_void,
@ -194,73 +180,24 @@ pub extern "C" fn alvr_initialize(
use_opengl: bool,
external_decoder: bool,
) {
unsafe { ndk_context::initialize_android_context(java_vm, context) };
#[cfg(target_os = "android")]
unsafe {
ndk_context::initialize_android_context(java_vm, context)
};
logging_backend::init_logging();
extern "C" fn create_decoder(buffer: *const c_char, length: i32) {
let mut csd_0 = vec![0; length as _];
unsafe { ptr::copy_nonoverlapping(buffer, csd_0.as_mut_ptr(), length as _) };
if EXTERNAL_DECODER.value() {
// duration == 0 is the flag to identify the config NALS
NAL_QUEUE.lock().push_back((Duration::ZERO, csd_0));
EVENT_QUEUE.lock().push_back(AlvrEvent::NalReady);
} else {
if DECODER_ENQUEUER.lock().is_none() {
let config = DECODER_INIT_CONFIG.lock();
let (enqueuer, dequeuer) =
platform::video_decoder_split(config.codec, &csd_0, &config.options).unwrap();
*DECODER_ENQUEUER.lock() = Some(enqueuer);
*DECODER_DEQUEUER.lock() = Some(dequeuer);
if let Some(sender) = &*CONTROL_CHANNEL_SENDER.lock() {
sender.send(ClientControlPacket::RequestIdr).ok();
}
}
}
}
/// Callback invoked by native code for every received video NAL unit.
///
/// Records the NAL timestamp for late-decoder detection, copies the payload
/// out of the caller-owned buffer, then routes it to either the external
/// decoder queue or the internal decoder enqueuer.
extern "C" fn push_nal(buffer: *const c_char, length: i32, timestamp_ns: u64) {
    let timestamp = Duration::from_nanos(timestamp_ns);

    // Keep a sliding window of the last 20 enqueued timestamps; used by
    // alvr_wait_for_frame to detect a decoder that fell behind.
    {
        let mut timestamps_lock = LAST_ENQUEUED_TIMESTAMPS.lock();
        timestamps_lock.push_back(timestamp);
        if timestamps_lock.len() > 20 {
            timestamps_lock.pop_front();
        }
    }

    // Copy the NAL before the native pointer goes out of scope.
    let mut nal_buffer = vec![0; length as _];
    unsafe { ptr::copy_nonoverlapping(buffer, nal_buffer.as_mut_ptr(), length as _) }

    if EXTERNAL_DECODER.value() {
        NAL_QUEUE.lock().push_back((timestamp, nal_buffer));
        EVENT_QUEUE.lock().push_back(AlvrEvent::NalReady);
    } else if let Some(decoder) = &*DECODER_ENQUEUER.lock() {
        show_err(decoder.push_frame_nal(
            timestamp,
            &nal_buffer,
            Duration::from_millis(500),
        ));
    } else if let Some(sender) = &*CONTROL_CHANNEL_SENDER.lock() {
        // Decoder not created yet: drop the NAL and request a new IDR frame.
        sender.send(ClientControlPacket::RequestIdr).ok();
    }
}
#[cfg(target_os = "android")]
unsafe {
LOBBY_ROOM_GLTF_PTR = LOBBY_ROOM_GLTF.as_ptr();
LOBBY_ROOM_GLTF_LEN = LOBBY_ROOM_GLTF.len() as _;
LOBBY_ROOM_BIN_PTR = LOBBY_ROOM_BIN.as_ptr();
LOBBY_ROOM_BIN_LEN = LOBBY_ROOM_BIN.len() as _;
}
createDecoder = Some(create_decoder);
pushNal = Some(push_nal);
unsafe {
createDecoder = Some(decoder::create_decoder);
pushNal = Some(decoder::push_nal);
}
// Make sure to reset config in case of version compat mismatch.
@ -269,11 +206,13 @@ pub extern "C" fn alvr_initialize(
Config::default().store();
}
#[cfg(target_os = "android")]
platform::try_get_microphone_permission();
USE_OPENGL.set(use_opengl);
EXTERNAL_DECODER.set(external_decoder);
#[cfg(target_os = "android")]
if use_opengl {
unsafe { initGraphicsNative() };
}
@ -320,13 +259,16 @@ pub unsafe extern "C" fn alvr_destroy() {
// shutdown and wait for tasks to finish
drop(RUNTIME.lock().take());
#[cfg(target_os = "android")]
if USE_OPENGL.value() {
destroyGraphicsNative();
}
}
/// If no OpenGL is selected, arguments are ignored
#[no_mangle]
pub unsafe extern "C" fn alvr_resume(swapchain_textures: *mut *const i32, swapchain_length: i32) {
#[cfg(target_os = "android")]
if USE_OPENGL.value() {
let resolution = *PREFERRED_RESOLUTION.lock();
prepareLobbyRoom(
@ -344,12 +286,13 @@ pub unsafe extern "C" fn alvr_resume(swapchain_textures: *mut *const i32, swapch
pub unsafe extern "C" fn alvr_pause() {
IS_RESUMED.set(false);
#[cfg(target_os = "android")]
if USE_OPENGL.value() {
destroyRenderers();
}
}
// Returns true if there was a new event
/// Returns true if there was a new event
#[no_mangle]
pub unsafe extern "C" fn alvr_poll_event(out_event: *mut AlvrEvent) -> bool {
if let Some(event) = EVENT_QUEUE.lock().pop_front() {
@ -362,6 +305,7 @@ pub unsafe extern "C" fn alvr_poll_event(out_event: *mut AlvrEvent) -> bool {
}
/// Call only when using OpenGL
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern "C" fn alvr_start_stream(
swapchain_textures: *mut *const i32,
@ -418,88 +362,8 @@ pub extern "C" fn alvr_send_playspace(width: f32, height: f32) {
}
}
/// Call only with internal decoder.
/// Blocks (up to the internal dequeue timeouts) for the next decoded frame,
/// writing its buffer pointer into `out_buffer`. Returns the frame timestamp
/// in nanoseconds, or -1 if no frame is available. The buffer is presumably
/// an AHardwareBuffer on Android — NOTE(review): not verifiable from this
/// file alone, confirm against the platform decoder implementation.
#[no_mangle]
pub unsafe extern "C" fn alvr_wait_for_frame(out_buffer: *mut *mut c_void) -> i64 {
let timestamp = if let Some(decoder) = &*DECODER_DEQUEUER.lock() {
// Note on frame pacing: sometimes there could be late frames stored inside the decoder,
// which are gradually drained by polling two frames per frame. But sometimes a frame could
// be received earlier than usual because of network jitter. In this case, if we polled the
// second frame immediately, the next frame would probably be late. To mitigate this
// scenario, a 5ms delay measurement is used to decide if to poll the second frame or not.
// todo: remove the 5ms "magic number" and implement proper phase sync measuring network
// jitter variance.
let start_instant = Instant::now();
match decoder.dequeue_frame(Duration::from_millis(50), Duration::from_millis(100)) {
Ok(DecoderDequeuedData::Frame {
buffer_ptr,
timestamp,
}) => {
// The first frame arrived quickly: try draining one extra frame to
// catch up on any backlog stored inside the decoder.
if Instant::now() - start_instant < Duration::from_millis(5) {
debug!("Try draining extra decoder frame");
match decoder
.dequeue_frame(Duration::from_micros(1), Duration::from_millis(100))
{
Ok(DecoderDequeuedData::Frame {
buffer_ptr,
timestamp,
}) => {
// Second frame available: it supersedes the first.
*out_buffer = buffer_ptr;
Some(timestamp)
}
Ok(_) => {
// Note: data from first dequeue!
*out_buffer = buffer_ptr;
Some(timestamp)
}
Err(e) => {
// Decoder failure: force a reconnect.
error!("Error while decoder dequeue (2nd time): {e}");
DISCONNECT_NOTIFIER.notify_waiters();
None
}
}
} else {
// Frame took a while to arrive: return it without extra polling.
*out_buffer = buffer_ptr;
Some(timestamp)
}
}
Ok(data) => {
info!("Decoder: no frame dequeued. {data:?}");
None
}
Err(e) => {
// Decoder failure: force a reconnect.
error!("Error while decoder dequeue: {e}");
DISCONNECT_NOTIFIER.notify_waiters();
None
}
}
} else {
// Decoder not created yet: back off briefly to avoid busy-waiting.
thread::sleep(Duration::from_millis(5));
None
};
if let Some(timestamp) = timestamp {
if let Some(stats) = &mut *STATISTICS_MANAGER.lock() {
stats.report_frame_decoded(timestamp);
}
// A dequeued timestamp not found in the recent-enqueue window means the
// decoder fell behind beyond recovery: force a reconnect.
if !LAST_ENQUEUED_TIMESTAMPS.lock().contains(&timestamp) {
error!("Detected late decoder, disconnecting...");
DISCONNECT_NOTIFIER.notify_waiters();
}
timestamp.as_nanos() as i64
} else {
-1
}
}
/// Call only when using OpenGL
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern "C" fn alvr_render_lobby(
eye_inputs: *const AlvrEyeInput,
@ -536,6 +400,8 @@ pub unsafe extern "C" fn alvr_render_lobby(
}
/// Call only when using OpenGL
#[cfg(target_os = "android")]
#[no_mangle]
pub unsafe extern "C" fn alvr_render_stream(
swapchain_indices: *const i32,
@ -663,27 +529,3 @@ pub extern "C" fn alvr_report_frame_decoded(timestamp_ns: u64) {
stats.report_frame_decoded(Duration::from_nanos(timestamp_ns as _));
}
}
/// Call only with external decoder.
/// Returns the byte size of the next NAL, or 0 if none is ready.
/// If `out_nal` or `out_timestamp_ns` is null, the NAL is left queued; call
/// this way first to learn the required allocation size.
/// `out_timestamp_ns` is set to 0 for config NALs.
#[no_mangle]
pub extern "C" fn alvr_poll_nal(out_nal: *mut c_char, out_timestamp_ns: *mut u64) -> u64 {
    // Null output pointers turn this call into a pure size query.
    let should_dequeue = !out_nal.is_null() && !out_timestamp_ns.is_null();

    let mut queue = NAL_QUEUE.lock();

    match queue.pop_front() {
        Some((timestamp, nal)) => {
            let size = nal.len() as u64;

            if should_dequeue {
                unsafe {
                    ptr::copy_nonoverlapping(nal.as_ptr(), out_nal, nal.len());
                    *out_timestamp_ns = timestamp.as_nanos() as _;
                }
            } else {
                // Size query: put the NAL back at the head of the queue.
                queue.push_front((timestamp, nal))
            }

            size
        }
        None => 0,
    }
}

View File

@ -1,8 +1,13 @@
// #[cfg(target_os = "android")]
#[cfg(target_os = "android")]
pub mod android;
// #[cfg(target_os = "android")]
#[cfg(target_os = "android")]
pub use android::{
context, device_name, try_get_microphone_permission, video_decoder_split, vm,
DecoderDequeuedData, VideoDecoderDequeuer, VideoDecoderEnqueuer,
};
/// Fallback device name for platforms where the headset model cannot be
/// queried (every platform except Android).
#[cfg(not(target_os = "android"))]
pub fn device_name() -> String {
    String::from("Wired headset")
}