Finally fixed the mislabeling of FrameIndex in the packet, and we now get smooth head tracking on Gear VR.

The causes were the internal buffering of NvEnc (nExtraOutputDelay) and the misleading arguments passed to SubmitLayer.
This commit is contained in:
polygraphene 2018-05-20 02:26:31 +09:00
parent 6e7e657706
commit 36fc7cac52
33 changed files with 8358 additions and 804 deletions
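
The NvEnc half of the fix shows up below as an extra constructor argument (enc = new NvEncoderD3D11(..., format, 0)). A minimal sketch of why that matters, assuming the NVIDIA Video Codec SDK wrapper this driver uses:

    // The SDK wrapper defaults to nExtraOutputDelay = 3, so EncodeFrame() returns
    // packets for a frame submitted several calls earlier; pairing those packets
    // with the current FrameIndex mislabels them on the wire. Passing 0 makes
    // EncodeFrame() block until the frame just submitted is fully encoded.
    NvEncoderD3D11 *enc = new NvEncoderD3D11(
        pD3DRender->GetDevice(), nWidth, nHeight,
        NV_ENC_BUFFER_FORMAT_ABGR, /*nExtraOutputDelay=*/0);

    std::vector<std::vector<uint8_t>> vPacket;
    enc->EncodeFrame(vPacket); // with zero delay, vPacket belongs to this frame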

View File

@ -0,0 +1,393 @@
#include "FrameRender.h"
#include "Utils.h"
#include "Logger.h"
#include "resource.h"
extern HINSTANCE g_hInstance;
static const char *VERTEX_SHADER =
"Texture2D txLeft : register(t0);\n"
"Texture2D txRight : register(t1);\n"
"SamplerState samLinear : register(s0);\n"
"\n"
"struct VS_INPUT\n"
"{\n"
" float4 Pos : POSITION;\n"
" float2 Tex : TEXCOORD0;\n"
"};\n"
"\n"
"struct PS_INPUT\n"
"{\n"
" float4 Pos : SV_POSITION;\n"
" float2 Tex : TEXCOORD0;\n"
"};\n"
"PS_INPUT VS(VS_INPUT input)\n"
"{\n"
" PS_INPUT output = (PS_INPUT)0;\n"
" output.Pos = input.Pos;\n"
" output.Tex = input.Tex;\n"
"\n"
" return output;\n"
"}\n"
"float4 PS(PS_INPUT input) : SV_Target\n"
"{\n"
//"float offset = (1448.0 - 1024.0) / 2 / 1448.0;\n"
"float offset = 0.0;\n"
"float shrink_to = 1.0 - offset * 2;\n"
"float x = input.Tex.x;\n"
"float y = input.Tex.y;\n"
" if (input.Tex.x < 0.5){\n"
" x = x * 2;\n"
" x = x * shrink_to + offset;\n"
" y = y * shrink_to + offset;\n"
" return txLeft.Sample(samLinear, float2(1.0 - x, 1.0 - y)); // We need this hack, because We cloud not resolve upside down issue by changing texcoord in buffer.\n"
" }else{\n"
" x = x * 2 - 1.0;\n"
" x = x * shrink_to + offset;\n"
" y = y * shrink_to + offset;\n"
" return txLeft.Sample(samLinear, float2(1.0 - x, 1.0 - y)); // We need this hack, because We cloud not resolve upside down issue by changing texcoord in buffer.\n"
" }\n"
"}\n";
static const char *PIXEL_SHADER = VERTEX_SHADER; // unused alias; both VS and PS entry points are compiled from the same source string
FrameRender::FrameRender(int renderWidth, int renderHeight, bool debugFrameIndex, CD3DRender *pD3DRender)
: m_renderWidth(renderWidth)
, m_renderHeight(renderHeight)
, m_pD3DRender(pD3DRender)
, m_debugFrameIndex(debugFrameIndex)
{
}
FrameRender::~FrameRender()
{
}
bool FrameRender::Startup(ID3D11Texture2D * pTexture[])
{
if (m_pStagingTexture) {
return true;
}
D3D11_TEXTURE2D_DESC srcDesc;
pTexture[0]->GetDesc(&srcDesc);
D3D11_TEXTURE2D_DESC stagingTextureDesc;
ZeroMemory(&stagingTextureDesc, sizeof(stagingTextureDesc));
stagingTextureDesc.Width = m_renderWidth * 2;
stagingTextureDesc.Height = m_renderHeight;
stagingTextureDesc.Format = srcDesc.Format;
stagingTextureDesc.MipLevels = 1;
stagingTextureDesc.ArraySize = 1;
stagingTextureDesc.SampleDesc.Count = 1;
stagingTextureDesc.Usage = D3D11_USAGE_DEFAULT;
//stagingTextureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
stagingTextureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
if (FAILED(m_pD3DRender->GetDevice()->CreateTexture2D(&stagingTextureDesc, NULL, &m_pStagingTexture)))
{
Log("Failed to create staging texture!");
return false;
}
HRESULT hr = m_pD3DRender->GetDevice()->CreateRenderTargetView(m_pStagingTexture.Get(), NULL, &m_pRenderTargetView);
if (FAILED(hr)) {
Log("CreateRenderTargetView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Create depth stencil texture
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory(&descDepth, sizeof(descDepth));
descDepth.Width = stagingTextureDesc.Width;
descDepth.Height = stagingTextureDesc.Height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
hr = m_pD3DRender->GetDevice()->CreateTexture2D(&descDepth, nullptr, &m_pDepthStencil);
if (FAILED(hr)) {
Log("CreateTexture2D %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Create the depth stencil view
D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
ZeroMemory(&descDSV, sizeof(descDSV));
descDSV.Format = descDepth.Format;
descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
hr = m_pD3DRender->GetDevice()->CreateDepthStencilView(m_pDepthStencil.Get(), &descDSV, &m_pDepthStencilView);
if (FAILED(hr)) {
Log("CreateDepthStencilView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
m_pD3DRender->GetContext()->OMSetRenderTargets(1, m_pRenderTargetView.GetAddressOf(), m_pDepthStencilView.Get());
D3D11_VIEWPORT viewport;
viewport.Width = (float)m_renderWidth * 2;
viewport.Height = (float)m_renderHeight;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
m_pD3DRender->GetContext()->RSSetViewports(1, &viewport);
ID3DBlob *vshader, *pshader, *error;
hr = D3DCompile(VERTEX_SHADER, strlen(VERTEX_SHADER), "vs", NULL, NULL, "VS", "vs_4_0", 0, 0, &vshader, &error);
Log("D3DCompile vs %p", hr);
if (FAILED(hr)) {
Log("%s", error->GetBufferPointer());
return false;
}
if (error != NULL) {
error->Release();
error = NULL;
}
hr = m_pD3DRender->GetDevice()->CreateVertexShader((const DWORD*)vshader->GetBufferPointer(), vshader->GetBufferSize(), NULL, &m_pVertexShader);
if (FAILED(hr)) {
Log("CreateVertexShader %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
hr = D3DCompile(VERTEX_SHADER, strlen(VERTEX_SHADER), "ps", NULL, NULL, "PS", "ps_4_0", 0, 0, &pshader, &error);
Log("D3DCompile ps %p", hr);
if (FAILED(hr)) {
Log("%s", error->GetBufferPointer());
return false;
}
if (error != NULL) {
error->Release();
}
hr = m_pD3DRender->GetDevice()->CreatePixelShader((const DWORD*)pshader->GetBufferPointer(), pshader->GetBufferSize(), NULL, &m_pPixelShader);
if (FAILED(hr)) {
Log("CreatePixelShader %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Define the input layout
D3D11_INPUT_ELEMENT_DESC layout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
UINT numElements = ARRAYSIZE(layout);
// Create the input layout
hr = m_pD3DRender->GetDevice()->CreateInputLayout(layout, numElements, vshader->GetBufferPointer(),
vshader->GetBufferSize(), &m_pVertexLayout);
if (FAILED(hr)) {
Log("CreateInputLayout %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
vshader->Release();
// Set the input layout
m_pD3DRender->GetContext()->IASetInputLayout(m_pVertexLayout.Get());
// Source textures have 1448x1448 pixels, but the destination texture (remote display) has 1024x1024 pixels.
// Apply an offset to crop the center of the source textures.
float tex_offset = (1448 - 1024) / 2 / 1448.0;
tex_offset = 0; // cropping is currently disabled
// Create vertex buffer
SimpleVertex vertices[] =
{
{ DirectX::XMFLOAT3(-1.0f, -1.0f, 0.5f), DirectX::XMFLOAT2(1.0f - tex_offset, 0.0f + tex_offset) },
{ DirectX::XMFLOAT3(1.0f, 1.0f, 0.5f), DirectX::XMFLOAT2(0.0f + tex_offset, 1.0f - tex_offset) },
{ DirectX::XMFLOAT3(1.0f, -1.0f, 0.5f), DirectX::XMFLOAT2(0.0f + tex_offset, 0.0f + tex_offset) },
{ DirectX::XMFLOAT3(-1.0f, 1.0f, 0.5f), DirectX::XMFLOAT2(1.0f - tex_offset, 1.0f - tex_offset) },
};
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(SimpleVertex) * 4;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = vertices;
hr = m_pD3DRender->GetDevice()->CreateBuffer(&bd, &InitData, &m_pVertexBuffer);
if (FAILED(hr)) {
Log("CreateBuffer 1 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Set vertex buffer
UINT stride = sizeof(SimpleVertex);
UINT offset = 0;
m_pD3DRender->GetContext()->IASetVertexBuffers(0, 1, m_pVertexBuffer.GetAddressOf(), &stride, &offset);
// Create index buffer
WORD indices[] =
{
0,1,2,
0,3,1
};
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(WORD) * 6;
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = 0;
InitData.pSysMem = indices;
hr = m_pD3DRender->GetDevice()->CreateBuffer(&bd, &InitData, &m_pIndexBuffer);
if (FAILED(hr)) {
Log("CreateBuffer 2 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Set index buffer
m_pD3DRender->GetContext()->IASetIndexBuffer(m_pIndexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0);
// Set primitive topology
m_pD3DRender->GetContext()->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create the sample state
D3D11_SAMPLER_DESC sampDesc;
ZeroMemory(&sampDesc, sizeof(sampDesc));
sampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
sampDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
sampDesc.MinLOD = 0;
sampDesc.MaxLOD = D3D11_FLOAT32_MAX;
hr = m_pD3DRender->GetDevice()->CreateSamplerState(&sampDesc, &m_pSamplerLinear);
if (FAILED(hr)) {
Log("CreateSamplerState 5 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// NOTE: the embedded font resource is looked up but not used yet; the sprite font still loads from a hard-coded path.
HRSRC fontResource = FindResource(g_hInstance, MAKEINTRESOURCE(IDR_FONT), RT_RCDATA);
m_Font = std::make_unique<DirectX::SpriteFont>(m_pD3DRender->GetDevice(), L"C:\\src\\virtual_display\\driver_virtual_display\\resources\\inconsolata.spritefont");
m_SpriteBatch = std::make_unique<DirectX::SpriteBatch>(m_pD3DRender->GetContext());
Log("Staging Texture created");
return true;
}
bool FrameRender::RenderFrame(ID3D11Texture2D * pTexture[], int textureNum, const std::string& debugText)
{
D3D11_TEXTURE2D_DESC srcDesc;
pTexture[0]->GetDesc(&srcDesc);
Log("RenderFrame %dx%d %d", srcDesc.Width, srcDesc.Height, srcDesc.Format);
if (textureNum == 1) {
m_pD3DRender->GetContext()->CopyResource(m_pStagingTexture.Get(), pTexture[0]);
}
else {
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc = {};
SRVDesc.Format = srcDesc.Format;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
SRVDesc.Texture2D.MostDetailedMip = 0;
SRVDesc.Texture2D.MipLevels = 1;
HRESULT hr = m_pD3DRender->GetDevice()->CreateShaderResourceView(pTexture[0], &SRVDesc, m_pShaderResourceView[0].ReleaseAndGetAddressOf());
if (FAILED(hr)) {
Log("CreateShaderResourceView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
hr = m_pD3DRender->GetDevice()->CreateShaderResourceView(pTexture[1], &SRVDesc, m_pShaderResourceView[1].ReleaseAndGetAddressOf());
if (FAILED(hr)) {
Log("CreateShaderResourceView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
m_pD3DRender->GetContext()->OMSetRenderTargets(1, m_pRenderTargetView.GetAddressOf(), m_pDepthStencilView.Get());
D3D11_VIEWPORT viewport;
viewport.Width = (float)m_renderWidth * 2;
viewport.Height = (float)m_renderHeight;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
m_pD3DRender->GetContext()->RSSetViewports(1, &viewport);
// Set the input layout
m_pD3DRender->GetContext()->IASetInputLayout(m_pVertexLayout.Get());
// Set vertex buffer
UINT stride = sizeof(SimpleVertex);
UINT offset = 0;
m_pD3DRender->GetContext()->IASetVertexBuffers(0, 1, m_pVertexBuffer.GetAddressOf(), &stride, &offset);
// Set index buffer
m_pD3DRender->GetContext()->IASetIndexBuffer(m_pIndexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0);
// Set primitive topology
m_pD3DRender->GetContext()->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Clear the back buffer
m_pD3DRender->GetContext()->ClearRenderTargetView(m_pRenderTargetView.Get(), DirectX::Colors::MidnightBlue);
// Clear the depth buffer to 1.0 (max depth)
m_pD3DRender->GetContext()->ClearDepthStencilView(m_pDepthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
// Render the full-screen quad
m_pD3DRender->GetContext()->VSSetShader(m_pVertexShader.Get(), nullptr, 0);
m_pD3DRender->GetContext()->PSSetShader(m_pPixelShader.Get(), nullptr, 0);
ID3D11ShaderResourceView *shaderResourceView[2] = { m_pShaderResourceView[0].Get(), m_pShaderResourceView[1].Get() };
m_pD3DRender->GetContext()->PSSetShaderResources(0, 2, shaderResourceView);
//m_pD3DRender->GetContext()->PSSetShaderResources(0, 1, shaderResourceView);
m_pD3DRender->GetContext()->PSSetSamplers(0, 1, m_pSamplerLinear.GetAddressOf());
m_pD3DRender->GetContext()->DrawIndexed(6, 0, 0);
RenderDebugText(debugText);
m_pD3DRender->GetContext()->Flush();
}
return true;
}
void FrameRender::RenderDebugText(const std::string & debugText)
{
if (!m_debugFrameIndex) {
return;
}
m_SpriteBatch->Begin();
std::vector<wchar_t> buf(debugText.size() + 1);
_snwprintf_s(&buf[0], buf.size(), buf.size(), L"%hs", debugText.c_str());
DirectX::SimpleMath::Vector2 origin = m_Font->MeasureString(&buf[0]);
DirectX::SimpleMath::Vector2 FontPos;
FontPos.x = 100;
FontPos.y = 100;
m_Font->DrawString(m_SpriteBatch.get(), &buf[0],
FontPos, DirectX::Colors::Green, 0.f);
m_SpriteBatch->End();
}
ComPtr<ID3D11Texture2D> FrameRender::GetTexture()
{
return m_pStagingTexture;
}

View File

@ -0,0 +1,61 @@
#pragma once
#include <string>
#include <memory>
#include <stdint.h>
#include <d3d11.h>
#include <wrl.h>
#include <d3dcompiler.h>
#include <directxmath.h>
#include <directxcolors.h>
#include <SpriteFont.h>
#include <SimpleMath.h>
#include "d3drender.h"
using Microsoft::WRL::ComPtr;
class FrameRender
{
public:
FrameRender(int renderWidth, int renderHeight, bool debugFrameIndex, CD3DRender *pD3DRender);
virtual ~FrameRender();
bool Startup(ID3D11Texture2D *pTexture[]);
bool RenderFrame(ID3D11Texture2D *pTexture[], int textureNum, const std::string& debugText);
void RenderDebugText(const std::string& debugText);
ComPtr<ID3D11Texture2D> GetTexture();
private:
bool m_debugFrameIndex;
CD3DRender *m_pD3DRender;
int m_renderWidth;
int m_renderHeight;
ComPtr<ID3D11Texture2D> m_pStagingTexture;
ComPtr<ID3D11VertexShader> m_pVertexShader;
ComPtr<ID3D11PixelShader> m_pPixelShader;
ComPtr<ID3D11InputLayout> m_pVertexLayout;
ComPtr<ID3D11Buffer> m_pVertexBuffer;
ComPtr<ID3D11Buffer> m_pIndexBuffer;
ComPtr<ID3D11SamplerState> m_pSamplerLinear;
ComPtr<ID3D11Texture2D> m_pDepthStencil;
ComPtr<ID3D11ShaderResourceView> m_pShaderResourceView[2];
ComPtr<ID3D11RenderTargetView> m_pRenderTargetView;
ComPtr<ID3D11DepthStencilView> m_pDepthStencilView;
std::unique_ptr<DirectX::SpriteFont> m_Font;
std::unique_ptr<DirectX::SpriteBatch> m_SpriteBatch;
uint64_t m_frameIndex2;
struct SimpleVertex
{
DirectX::XMFLOAT3 Pos;
DirectX::XMFLOAT2 Tex;
};
};

View File

@ -80,6 +80,7 @@ public:
struct ChangeSettings {
uint32_t type; // 4
uint32_t enableTestMode;
uint32_t suspend;
};
#pragma pack(pop)
@ -90,6 +91,11 @@ public:
m_NewClientCallback = callback;
m_PoseUpdatedCallback = poseCallback;
memset(&m_TrackingInfo, 0, sizeof(m_TrackingInfo));
InitializeCriticalSection(&m_CS);
m_Settings.type = 4;
m_Settings.enableTestMode = 0;
m_Settings.suspend = 0;
m_Poller.reset(new Poller());
m_Socket.reset(new UdpSocket(host, port, m_Poller));
@ -98,6 +104,10 @@ public:
m_UseUdp = true;
}
~Listener() {
DeleteCriticalSection(&m_CS);
}
void Run() override
{
SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_BELOW_NORMAL);
@ -125,7 +135,9 @@ public:
Log("Hello Message: %s", message->deviceName);
}
else if (type == 2 && len >= sizeof(TrackingInfo)) {
EnterCriticalSection(&m_CS);
m_TrackingInfo = *(TrackingInfo *)buf;
LeaveCriticalSection(&m_CS);
Log("got tracking info %d %f %f %f %f", (int)m_TrackingInfo.FrameIndex,
m_TrackingInfo.HeadPose_Pose_Orientation.x,
@ -167,15 +179,22 @@ public:
std::vector<std::string> commands;
if (m_ControlSocket->Recv(commands)) {
for (auto it = commands.begin(); it != commands.end(); ++it) {
if (*it == "EnableTestMode 0") {
SendChangeSettings(0);
}else if (*it == "EnableTestMode 1") {
SendChangeSettings(1);
}else if (*it == "EnableTestMode 2") {
SendChangeSettings(2);
}
else {
int split = it->find(" ");
if (split != -1) {
std::string commandName = it->substr(0, split);
std::string args = it->substr(split + 1);
if (commandName == "EnableTestMode") {
m_Settings.enableTestMode = atoi(args.c_str());
SendChangeSettings();
}
else if (commandName == "Suspend") {
m_Settings.suspend = atoi(args.c_str());
SendChangeSettings();
}
else {
Log("Invalid control command: %s", commandName.c_str());
}
}
}
}
@ -227,15 +246,11 @@ public:
}
}
void SendChangeSettings(int EnableTestMode) {
ChangeSettings settings;
settings.type = 4;
settings.enableTestMode = EnableTestMode;
void SendChangeSettings() {
if (!m_Socket->IsClientValid()) {
return;
}
m_Socket->Send((char *)&settings, sizeof(settings));
m_Socket->Send((char *)&m_Settings, sizeof(m_Settings));
}
void Stop()
@ -250,8 +265,10 @@ public:
return m_TrackingInfo.type == 2;
}
const TrackingInfo &GetTrackingInfo()const {
return m_TrackingInfo;
void GetTrackingInfo(TrackingInfo &info) {
EnterCriticalSection(&m_CS);
info = m_TrackingInfo;
LeaveCriticalSection(&m_CS);
}
uint64_t clientToServerTime(uint64_t clientTime) const {
@ -280,4 +297,7 @@ private:
TrackingInfo m_TrackingInfo;
uint64_t m_TimeDiff = 0;
CRITICAL_SECTION m_CS;
ChangeSettings m_Settings;
};
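
The Listener change above swaps the by-reference GetTrackingInfo() for a copy-out guarded by a CRITICAL_SECTION, since m_TrackingInfo is written on the UDP receive thread and read on the driver thread. The same pattern with a scoped guard, purely as an illustration (the commit uses the raw Enter/LeaveCriticalSection calls shown above; ScopedCS is a hypothetical helper):

    struct ScopedCS { // tiny RAII wrapper around a Win32 CRITICAL_SECTION
        CRITICAL_SECTION &cs;
        explicit ScopedCS(CRITICAL_SECTION &c) : cs(c) { EnterCriticalSection(&cs); }
        ~ScopedCS() { LeaveCriticalSection(&cs); }
    };

    void GetTrackingInfo(TrackingInfo &info) {
        ScopedCS lock(m_CS);   // hold the lock only while copying
        info = m_TrackingInfo; // caller gets a consistent snapshot
    }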

View File

@ -20,256 +20,41 @@
#include <locale>
#include <codecvt>
#ifdef _WIN32
#include <winsock.h>
#include <windows.h>
extern std::string g_DebugOutputDir;
#pragma comment(lib, "ws2_32.lib")
#undef ERROR
#else
#include <unistd.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#define SOCKET int
#define INVALID_SOCKET -1
#endif
enum LogLevel {
TRACE,
INFO,
WARNING,
ERROR,
FATAL
};
namespace simplelogger{
class Logger {
public:
Logger(LogLevel level, bool bPrintTimeStamp) : level(level), bPrintTimeStamp(bPrintTimeStamp) {}
virtual ~Logger() {}
virtual std::ostream& GetStream() = 0;
virtual void FlushStream() {}
bool ShouldLogFor(LogLevel l) {
return l >= level;
}
char* GetLead(LogLevel l, const char *szFile, int nLine, const char *szFunc) {
if (l < TRACE || l > FATAL) {
snprintf(szLead, sizeof(szLead), "[?????] ");
return szLead;
}
const char *szLevels[] = {"TRACE", "INFO", "WARN", "ERROR", "FATAL"};
if (bPrintTimeStamp) {
time_t t = time(NULL);
struct tm tm;
localtime_s(&tm, &t);
snprintf(szLead, sizeof(szLead), "[%-5s][%02d:%02d:%02d] ",
szLevels[l], tm.tm_hour, tm.tm_min, tm.tm_sec);
} else {
snprintf(szLead, sizeof(szLead), "[%-5s] ", szLevels[l]);
}
return szLead;
}
void EnterCriticalSection() {
mtx.lock();
}
void LeaveCriticalSection() {
mtx.unlock();
}
private:
LogLevel level;
char szLead[80];
bool bPrintTimeStamp;
std::mutex mtx;
};
class LoggerFactory {
public:
static Logger* CreateFileLogger(std::string strFilePath,
LogLevel level = INFO, bool bPrintTimeStamp = true) {
return new FileLogger(strFilePath, level, bPrintTimeStamp);
}
static Logger* CreateConsoleLogger(LogLevel level = INFO,
bool bPrintTimeStamp = true) {
return new ConsoleLogger(level, bPrintTimeStamp);
}
static Logger* CreateUdpLogger(char *szHost, unsigned uPort, LogLevel level = INFO,
bool bPrintTimeStamp = true) {
return new UdpLogger(szHost, uPort, level, bPrintTimeStamp);
}
private:
LoggerFactory() {}
class FileLogger : public Logger {
public:
FileLogger(std::string strFilePath, LogLevel level, bool bPrintTimeStamp)
: Logger(level, bPrintTimeStamp) {
pFileOut = new std::ofstream();
pFileOut->open(strFilePath.c_str());
}
~FileLogger() {
pFileOut->close();
}
std::ostream& GetStream() {
return *pFileOut;
}
private:
std::ofstream *pFileOut;
};
class ConsoleLogger : public Logger {
public:
ConsoleLogger(LogLevel level, bool bPrintTimeStamp)
: Logger(level, bPrintTimeStamp) {}
std::ostream& GetStream() {
return std::cout;
}
};
class UdpLogger : public Logger {
private:
class UdpOstream : public std::ostream {
public:
UdpOstream(char *szHost, unsigned short uPort) : std::ostream(&sb), socket(INVALID_SOCKET){
#ifdef _WIN32
WSADATA w;
if (WSAStartup(0x0101, &w) != 0) {
fprintf(stderr, "WSAStartup() failed.\n");
return;
}
#endif
socket = ::socket(AF_INET, SOCK_DGRAM, 0);
if (socket == INVALID_SOCKET) {
#ifdef _WIN32
WSACleanup();
#endif
fprintf(stderr, "socket() failed.\n");
return;
}
#ifdef _WIN32
unsigned int b1, b2, b3, b4;
sscanf_s(szHost, "%u.%u.%u.%u", &b1, &b2, &b3, &b4);
struct in_addr addr = {(unsigned char)b1, (unsigned char)b2, (unsigned char)b3, (unsigned char)b4};
#else
struct in_addr addr = {inet_addr(szHost)};
#endif
struct sockaddr_in s = {AF_INET, htons(uPort), addr};
server = s;
}
~UdpOstream() throw() {
if (socket == INVALID_SOCKET) {
return;
}
#ifdef _WIN32
closesocket(socket);
WSACleanup();
#else
close(socket);
#endif
}
void Flush() {
if (sendto(socket, sb.str().c_str(), (int)sb.str().length() + 1,
0, (struct sockaddr *)&server, (int)sizeof(sockaddr_in)) == -1) {
fprintf(stderr, "sendto() failed.\n");
}
sb.str("");
}
private:
std::stringbuf sb;
SOCKET socket;
struct sockaddr_in server;
};
public:
UdpLogger(char *szHost, unsigned uPort, LogLevel level, bool bPrintTimeStamp)
: Logger(level, bPrintTimeStamp), udpOut(szHost, (unsigned short)uPort) {}
UdpOstream& GetStream() {
return udpOut;
}
virtual void FlushStream() {
udpOut.Flush();
}
private:
UdpOstream udpOut;
};
};
class LogTransaction {
public:
LogTransaction(Logger *pLogger, LogLevel level, const char *szFile, const int nLine, const char *szFunc) : pLogger(pLogger), level(level) {
if (!pLogger) {
std::cout << "[-----] ";
return;
}
if (!pLogger->ShouldLogFor(level)) {
return;
}
pLogger->EnterCriticalSection();
pLogger->GetStream() << pLogger->GetLead(level, szFile, nLine, szFunc);
}
~LogTransaction() {
if (!pLogger) {
std::cout << std::endl;
return;
}
if (!pLogger->ShouldLogFor(level)) {
return;
}
pLogger->GetStream() << std::endl;
pLogger->FlushStream();
pLogger->LeaveCriticalSection();
if (level == FATAL) {
exit(1);
}
}
std::ostream& GetStream() {
if (!pLogger) {
return std::cout;
}
if (!pLogger->ShouldLogFor(level)) {
return ossNull;
}
return pLogger->GetStream();
}
private:
Logger *pLogger;
LogLevel level;
std::ostringstream ossNull;
};
static std::ofstream ofs;
static bool OpenFailed = false;
inline void OpenLog(const char *fileName) {
ofs.open(fileName);
}
extern simplelogger::Logger *logger;
#define LOG(level) simplelogger::LogTransaction(logger, level, __FILE__, __LINE__, __FUNCTION__).GetStream()
inline void Log(const char *pFormat, ...)
{
va_list args;
va_start(args, pFormat);
char buffer[10240];
vsprintf_s(buffer, pFormat, args);
va_end(args);
//vr::VRDriverLog()->Log( buffer );
//std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
//EventWriteString(converter.from_bytes(buffer).c_str());
FILETIME ft;
SYSTEMTIME st2, st;
uint64_t q;
if (!ofs.is_open()) {
return;
}
GetSystemTimeAsFileTime(&ft);
FileTimeToSystemTime(&ft, &st2);
SystemTimeToTzSpecificLocalTime(NULL, &st2, &st);
uint64_t q = (((uint64_t)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
q = (((uint64_t)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
q /= 10;
char timestamp[100];
snprintf(timestamp, sizeof(timestamp),
"[%02d:%02d:%02d.%03lld %03lld] ",
char buf[100];
snprintf(buf, sizeof(buf), "[%02d:%02d:%02d.%03lld %03lld] ",
st.wHour, st.wMinute, st.wSecond, q / 1000 % 1000, q % 1000);
logger->GetStream() << timestamp << buffer << std::endl;
OutputDebugString(buffer);
OutputDebugString("\r\n");
va_list args;
va_start(args, pFormat);
char buf2[10000];
vsnprintf(buf2, sizeof(buf2), pFormat, args);
va_end(args);
ofs << buf << buf2 << std::endl;
}
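
For reference, the timestamp arithmetic above: FILETIME counts 100 ns ticks, so q /= 10 converts to microseconds; q / 1000 % 1000 is then the millisecond field and q % 1000 the leftover microseconds. A condensed sketch:

    uint64_t ticks = (((uint64_t)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // 100 ns units
    uint64_t us = ticks / 10;            // microseconds
    uint64_t msField = us / 1000 % 1000; // printed as the first %03lld
    uint64_t usField = us % 1000;        // printed as the second %03lld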

View File

@ -19,8 +19,6 @@
#include "Logger.h"
#include <thread>
extern simplelogger::Logger *logger;
#ifdef __cuda_cuda_h__
inline bool check(CUresult e, int iLine, const char *szFile) {
if (e != CUDA_SUCCESS) {
@ -74,7 +72,7 @@ inline bool check(NVENCSTATUS e, int iLine, const char *szFile) {
"NV_ENC_ERR_RESOURCE_NOT_MAPPED",
};
if (e != NV_ENC_SUCCESS) {
LOG(FATAL) << "NVENC error " << aszErrName[e] << " at line " << iLine << " in file " << szFile;
//LOG(FATAL) << "NVENC error " << aszErrName[e] << " at line " << iLine << " in file " << szFile;
return false;
}
return true;
@ -84,7 +82,7 @@ inline bool check(NVENCSTATUS e, int iLine, const char *szFile) {
#ifdef _WINERROR_
inline bool check(HRESULT e, int iLine, const char *szFile) {
if (e != S_OK) {
LOG(FATAL) << "HRESULT error 0x" << (void *)(intptr_t)e << " at line " << iLine << " in file " << szFile;
//LOG(FATAL) << "HRESULT error 0x" << (void *)(intptr_t)e << " at line " << iLine << " in file " << szFile;
return false;
}
return true;
@ -94,7 +92,7 @@ inline bool check(HRESULT e, int iLine, const char *szFile) {
#if defined(__gl_h_) || defined(__GL_H__)
inline bool check(GLenum e, int iLine, const char *szFile) {
if (e != 0) {
LOG(ERROR) << "GLenum error " << e << " at line " << iLine << " in file " << szFile;
//LOG(ERROR) << "GLenum error " << e << " at line " << iLine << " in file " << szFile;
return false;
}
return true;
@ -103,7 +101,7 @@ inline bool check(GLenum e, int iLine, const char *szFile) {
inline bool check(int e, int iLine, const char *szFile) {
if (e < 0) {
LOG(ERROR) << "General error " << e << " at line " << iLine << " in file " << szFile;
//LOG(ERROR) << "General error " << e << " at line " << iLine << " in file " << szFile;
return false;
}
return true;
@ -168,12 +166,12 @@ public:
try {
pBuf = new uint8_t[nSize];
if (nSize != st.st_size) {
LOG(WARNING) << "File is too large - only " << std::setprecision(4) << 100.0 * nSize / (uint32_t)st.st_size << "% is loaded";
//LOG(WARNING) << "File is too large - only " << std::setprecision(4) << 100.0 * nSize / (uint32_t)st.st_size << "% is loaded";
}
break;
} catch(std::bad_alloc) {
if (!bPartial) {
LOG(ERROR) << "Failed to allocate memory in BufferedReader";
//LOG(ERROR) << "Failed to allocate memory in BufferedReader";
return;
}
nSize = (uint32_t)(nSize * 0.9);
@ -183,7 +181,7 @@ public:
std::ifstream fpIn(szFileName, std::ifstream::in | std::ifstream::binary);
if (!fpIn)
{
LOG(ERROR) << "Unable to open input file: " << szFileName;
//LOG(ERROR) << "Unable to open input file: " << szFileName;
return;
}

View File

@ -83,6 +83,7 @@ bool SrtSocket::Poll() {
}
}
return false;
}
void SrtSocket::Shutdown() {

View File

@ -3,6 +3,7 @@
#include <Windows.h>
#include <stdint.h>
#include <string>
#include <d3d11.h>
#include "openvr_driver.h"
@ -52,4 +53,77 @@ inline std::string GetDxErrorStr(HRESULT hr) {
}
}
return ret;
}
}
inline void DrawDigitPixels(D3D11_MAPPED_SUBRESOURCE &mapped, int x, int y, int digit) {
static const char map[][15] = {
{ 1, 1, 1,
1, 0, 1,
1, 0, 1,
1, 0, 1,
1, 1, 1 },
{ 0, 1, 0,
1, 1, 0,
0, 1, 0,
0, 1, 0,
1, 1, 1 },
{ 1, 1, 0,
1, 0, 1,
0, 1, 0,
1, 0, 0,
1, 1, 1 },
{ 1, 1, 1,
0, 0, 1,
0, 1, 1,
0, 0, 1,
1, 1, 1 },
{ 1, 0, 1,
1, 0, 1,
1, 1, 1,
0, 0, 1,
0, 0, 1 },
{ 1, 1, 1,
1, 0, 0,
1, 1, 1,
0, 0, 1,
1, 1, 1 },
{ 1, 1, 0,
1, 0, 0,
1, 1, 1,
1, 0, 1,
1, 1, 1 },
{ 1, 1, 1,
0, 0, 1,
0, 1, 0,
0, 1, 0,
0, 1, 0 },
{ 1, 1, 1,
1, 0, 1,
1, 1, 1,
1, 0, 1,
1, 1, 1 },
{ 1, 1, 1,
1, 0, 1,
1, 1, 1,
0, 0, 1,
0, 0, 1 }
};
if (digit < 0 || 9 < digit) {
digit = 0;
}
uint8_t *p = (uint8_t *)mapped.pData;
for (int i = 0; i < 5 * 2; i++) {
for (int j = 0; j < 3 * 2; j++) {
if (map[digit][i / 2 * 3 + j / 2]) {
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 0] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 1] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 2] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 3] = 0xff;
}
}
}
}
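
A hypothetical usage sketch for DrawDigitPixels, stamping a frame index into a mapped texture (pContext, pTexture, and frameIndex are assumed to exist, and the texture must be CPU-writable; each glyph is 6 px wide and 10 px tall, so an 8 px stride leaves a 2 px gap):

    D3D11_MAPPED_SUBRESOURCE mapped;
    if (SUCCEEDED(pContext->Map(pTexture, 0, D3D11_MAP_WRITE, 0, &mapped))) {
        char digits[21];
        snprintf(digits, sizeof(digits), "%llu", (unsigned long long)frameIndex);
        for (int i = 0; digits[i] != '\0'; i++) {
            DrawDigitPixels(mapped, 10 + i * 8, 10, digits[i] - '0');
        }
        pContext->Unmap(pTexture, 0);
    }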

View File

@ -11,169 +11,36 @@
#include "d3drender.h"
#include <winsock2.h>
#include <D3dx9core.h>
#include <d3d11.h>
#include <wrl.h>
#include <map>
#include <d3dcompiler.h>
#include <directxmath.h>
#include <directxcolors.h>
#include <d3d11_1.h>
#include <ScreenGrab.h>
#include <wincodec.h>
#include <wincodecsdk.h>
#include "NvEncoderD3D11.h"
#include "Logger.h"
#include "NvCodecUtils.h"
#include "SpriteFont.h"
#include "UdpSender.h"
#include "nvencoderclioptions.h"
#include "Listener.h"
#include "Utils.h"
#include "FrameRender.h"
simplelogger::Logger *logger = simplelogger::LoggerFactory::CreateConsoleLogger();
HINSTANCE g_hInstance;
std::string g_DebugOutputDir;
namespace
{
using Microsoft::WRL::ComPtr;
const char *VERTEX_SHADER =
"Texture2D txLeft : register(t0);\n"
"Texture2D txRight : register(t1);\n"
"SamplerState samLinear : register(s0);\n"
"\n"
"struct VS_INPUT\n"
"{\n"
" float4 Pos : POSITION;\n"
" float2 Tex : TEXCOORD0;\n"
"};\n"
"\n"
"struct PS_INPUT\n"
"{\n"
" float4 Pos : SV_POSITION;\n"
" float2 Tex : TEXCOORD0;\n"
"};\n"
"PS_INPUT VS(VS_INPUT input)\n"
"{\n"
" PS_INPUT output = (PS_INPUT)0;\n"
" output.Pos = input.Pos;\n"
" output.Tex = input.Tex;\n"
"\n"
" return output;\n"
"}\n"
"float4 PS(PS_INPUT input) : SV_Target\n"
"{\n"
//"float offset = (1448.0 - 1024.0) / 2 / 1448.0;\n"
"float offset = 0.0;\n"
"float shrink_to = 1.0 - offset * 2;\n"
"float x = input.Tex.x;\n"
"float y = input.Tex.y;\n"
" if (input.Tex.x < 0.5){\n"
" x = x * 2;\n"
" x = x * shrink_to + offset;\n"
" y = y * shrink_to + offset;\n"
" return txLeft.Sample(samLinear, float2(1.0 - x, 1.0 - y)); // We need this hack, because We cloud not resolve upside down issue by changing texcoord in buffer.\n"
" }else{\n"
" x = x * 2 - 1.0;\n"
" x = x * shrink_to + offset;\n"
" y = y * shrink_to + offset;\n"
" return txLeft.Sample(samLinear, float2(1.0 - x, 1.0 - y)); // We need this hack, because We cloud not resolve upside down issue by changing texcoord in buffer.\n"
" }\n"
"}\n";
const char *PIXEL_SHADER = VERTEX_SHADER;
void Test(CD3DRender *m_pD3DRender, ID3D11Texture2D *pTexture, int nHeight) {
D3D11_MAPPED_SUBRESOURCE mapped = { 0 };
if (SUCCEEDED(m_pD3DRender->GetContext()->Map(pTexture, 0, D3D11_MAP_READ, 0, &mapped)))
{
Log("[VDispDvr] Test Mapped Texture");
FILE *fp;
fopen_s(&fp, "C:\\src\\virtual_display\\test.bmp", "w");
fwrite(mapped.pData, mapped.RowPitch * nHeight, 1, fp);
fclose(fp);
m_pD3DRender->GetContext()->Unmap(pTexture, 0);
}
}
void DrawDigitPixels(D3D11_MAPPED_SUBRESOURCE &mapped, int x, int y, int digit) {
static const char map[][15] = {
{ 1, 1, 1,
1, 0, 1,
1, 0, 1,
1, 0, 1,
1, 1, 1},
{ 0, 1, 0,
1, 1, 0,
0, 1, 0,
0, 1, 0,
1, 1, 1},
{ 1, 1, 0,
1, 0, 1,
0, 1, 0,
1, 0, 0,
1, 1, 1},
{ 1, 1, 1,
0, 0, 1,
0, 1, 1,
0, 0, 1,
1, 1, 1},
{ 1, 0, 1,
1, 0, 1,
1, 1, 1,
0, 0, 1,
0, 0, 1},
{ 1, 1, 1,
1, 0, 0,
1, 1, 1,
0, 0, 1,
1, 1, 1},
{ 1, 1, 0,
1, 0, 0,
1, 1, 1,
1, 0, 1,
1, 1, 1},
{ 1, 1, 1,
0, 0, 1,
0, 1, 0,
0, 1, 0,
0, 1, 0},
{ 1, 1, 1,
1, 0, 1,
1, 1, 1,
1, 0, 1,
1, 1, 1 },
{ 1, 1, 1,
1, 0, 1,
1, 1, 1,
0, 0, 1,
0, 0, 1 }
};
if (digit < 0 || 9 < digit) {
digit = 0;
}
uint8_t *p = (uint8_t *)mapped.pData;
for (int i = 0; i < 5 * 2; i++) {
for (int j = 0; j < 3 * 2; j++) {
if (map[digit][i / 2 * 3 + j / 2]) {
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 0] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 1] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 2] = 0xff;
p[(y + i) * mapped.RowPitch + (x + j) * 4 + 3] = 0xff;
}
}
}
}
void DrawDebugTimestamp(CD3DRender *m_pD3DRender, ID3D11Texture2D *pTexture)
{
D3D11_MAPPED_SUBRESOURCE mapped = { 0 };
if (SUCCEEDED(m_pD3DRender->GetContext()->Map(pTexture, 0, D3D11_MAP_READ, 0, &mapped)))
HRESULT hr = m_pD3DRender->GetContext()->Map(pTexture, 0, D3D11_MAP_READ, 0, &mapped);
if (SUCCEEDED(hr))
{
int x = 10;
int y = 10;
@ -201,8 +68,38 @@ namespace
m_pD3DRender->GetContext()->Unmap(pTexture, 0);
}
else {
Log("DrawDebugTimestamp failed: %p %s", hr, GetDxErrorStr(hr).c_str());
}
}
void SaveDebugOutput(CD3DRender *m_pD3DRender, std::vector<std::vector<uint8_t>> &vPacket, ID3D11Texture2D *texture, uint64_t frameIndex) {
if (vPacket.size() == 0) {
return;
}
if (vPacket[0].size() < 10) {
return;
}
int type = vPacket[0][4] & 0x1F;
if (type == 7) {
// SPS, PPS, IDR
char filename[1000];
wchar_t filename2[1000];
snprintf(filename, sizeof(filename), "%s\\%llu.h264", g_DebugOutputDir.c_str(), frameIndex);
_snwprintf_s(filename2, _countof(filename2), _TRUNCATE, L"%hs\\%llu.dds", g_DebugOutputDir.c_str(), frameIndex); // count is in wchar_t units, not bytes
FILE *fp;
fopen_s(&fp, filename, "wb");
if (fp) {
for (auto packet : vPacket) {
fwrite(&packet[0], packet.size(), 1, fp);
}
fclose(fp);
}
DirectX::SaveDDSTextureToFile(m_pD3DRender->GetContext(), texture, filename2);
}
}
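
SaveDebugOutput keys off the first NAL unit in the packet: after the 4-byte Annex-B start code (00 00 00 01), byte 4 is the NAL header, whose low 5 bits are nal_unit_type. Type 7 is an SPS, which the encoder emits at the head of each IDR group, so it marks keyframes worth dumping. A sketch of the check:

    // H.264 NAL header: forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5)
    int nalType = vPacket[0][4] & 0x1F;  // 7 = SPS, 8 = PPS, 5 = IDR slice
    bool startsWithSPS = (nalType == 7); // treat an SPS as "IDR frame follows"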
inline vr::HmdQuaternion_t HmdQuaternion_Init(double w, double x, double y, double z)
{
@ -247,10 +144,11 @@ namespace
static const char * const k_pch_Settings_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons";
static const char * const k_pch_Settings_DisplayFrequency_Float = "displayFrequency";
static const char * const k_pch_Settings_EncoderOptions_String = "nvencOptions";
static const char * const k_pch_Settings_OutputFile_String = "outputFile";
static const char * const k_pch_Settings_ReplayFile_String = "replayFile";
static const char * const k_pch_Settings_LogFile_String = "logFile";
static const char * const k_pch_Settings_DebugTimestamp_Bool = "debugTimestamp";
static const char * const k_pch_Settings_DebugFrameIndex_Bool = "debugFrameIndex";
static const char * const k_pch_Settings_DebugFrameOutput_Bool = "debugFrameOutput";
static const char * const k_pch_Settings_DebugCaptureOutput_Bool = "debugCaptureOutput";
static const char * const k_pch_Settings_DebugOutputDir = "debugOutputDir";
static const char * const k_pch_Settings_ListenHost_String = "listenHost";
static const char * const k_pch_Settings_ListenPort_Int32 = "listenPort";
static const char * const k_pch_Settings_ControlListenHost_String = "controlListenHost";
@ -366,14 +264,17 @@ namespace
class CNvEncoder
{
public:
CNvEncoder(CD3DRender *pD3DRender)
CNvEncoder(CD3DRender *pD3DRender,
bool DebugTimestamp, bool DebugFrameOutput, bool DebugCaptureOutput)
: m_flFrameIntervalInSeconds( 0.0f )
, enc(NULL)
, m_pD3DRender(pD3DRender)
, m_bForceNv12(false)
, m_nFrame(0)
, m_Listener(NULL)
, m_DebugTimestamp(false)
, m_DebugTimestamp(DebugTimestamp)
, m_DebugFrameOutput(DebugFrameOutput)
, m_DebugCaptureOutput(DebugCaptureOutput)
{
}
@ -381,15 +282,14 @@ namespace
{}
bool Initialize(
std::string encoderOptions, std::string outputFile, std::string replayFile, Listener *listener,
std::string encoderOptions, Listener *listener,
uint32_t nWindowX, uint32_t nWindowY, uint32_t nWindowWidth, uint32_t nWindowHeight,
uint32_t nRefreshRateNumerator, uint32_t nRefreshRateDenominator,
bool DebugTimestamp)
uint32_t nRefreshRateNumerator, uint32_t nRefreshRateDenominator)
{
int nWidth = nWindowWidth;
int nHeight = nWindowHeight;
NvEncoderInitParam EncodeCLIOptions(encoderOptions.c_str());
m_DebugTimestamp = DebugTimestamp;
std::string outputFile = g_DebugOutputDir + "\\capture.h264";
if (nWindowWidth == 0 || nWindowHeight == 0 ||
nRefreshRateNumerator == 0 || nRefreshRateDenominator == 0)
@ -412,7 +312,7 @@ namespace
NV_ENC_BUFFER_FORMAT format = m_bForceNv12 ? NV_ENC_BUFFER_FORMAT_NV12 : NV_ENC_BUFFER_FORMAT_ARGB;
format = NV_ENC_BUFFER_FORMAT_ABGR;
enc = new NvEncoderD3D11(m_pD3DRender->GetDevice(), nWidth, nHeight, format);
enc = new NvEncoderD3D11(m_pD3DRender->GetDevice(), nWidth, nHeight, format, 0);
NV_ENC_INITIALIZE_PARAMS initializeParams = { NV_ENC_INITIALIZE_PARAMS_VER };
NV_ENC_CONFIG encodeConfig = { NV_ENC_CONFIG_VER };
@ -470,7 +370,7 @@ namespace
return m_flFrameIntervalInSeconds;
}
void Transmit(ID3D11Texture2D *pTexture, uint64_t presentationTime, uint64_t frameIndex, uint64_t clientTime)
void Transmit(ID3D11Texture2D *pTexture, uint64_t presentationTime, uint64_t frameIndex, uint64_t frameIndex2, uint64_t clientTime)
{
uint32_t nWidth;
uint32_t nHeight;
@ -481,11 +381,6 @@ namespace
Log("[VDispDvr] Transmit(begin)");
nWidth = min(desc.Width, SharedState_t::MAX_TEXTURE_WIDTH);
nHeight = min(desc.Height, SharedState_t::MAX_TEXTURE_HEIGHT);
Log("Transmit %dx%d %d", nWidth, nHeight, desc.Format);
const NvEncInputFrame* encoderInputFrame = enc->GetNextInputFrame();
if (m_DebugTimestamp) {
@ -501,22 +396,10 @@ namespace
{
ID3D11Texture2D *pTexBgra = reinterpret_cast<ID3D11Texture2D*>(encoderInputFrame->inputPtr);
Log("CopyResource start");
uint64_t start = GetTimestampUs();
D3D11_TEXTURE2D_DESC desc2;
pTexBgra->GetDesc(&desc2);
Log("%dx%d %d %d -> %dx%d %d %d",
desc.Width, desc.Height, desc.Format, desc.BindFlags,
desc2.Width, desc2.Height, desc2.Format, desc2.BindFlags);
m_pD3DRender->GetContext()->CopyResource(pTexBgra, pTexture);
uint64_t end = GetTimestampUs();
Log("CopyResource end %lld us", end - start);
}
uint64_t start = GetTimestampUs();
enc->EncodeFrame(vPacket);
uint64_t end = GetTimestampUs();
Log("EncodeFrame %lld us", end - start);
Log("Tracking info delay: %lld us", GetTimestampUs() - m_Listener->clientToServerTime(clientTime));
Log("Encoding delay: %lld us", GetTimestampUs() - presentationTime);
@ -531,6 +414,10 @@ namespace
}
}
if (m_DebugFrameOutput) {
SaveDebugOutput(m_pD3DRender, vPacket, reinterpret_cast<ID3D11Texture2D*>(encoderInputFrame->inputPtr), frameIndex2);
}
{
CSharedState::Ptr data(&m_sharedState);
data->m_flLastVsyncTimeInSeconds = SystemTime::GetInSeconds();
@ -560,6 +447,8 @@ namespace
Listener *m_Listener;
bool m_DebugTimestamp;
bool m_DebugFrameOutput;
bool m_DebugCaptureOutput;
};
//----------------------------------------------------------------------------
@ -570,15 +459,13 @@ namespace
class CEncoder : public CThread
{
public:
CEncoder( CD3DRender *pD3DRender, CNvEncoder *pRemoteDevice, int renderWidth, int renderHeight )
CEncoder( CD3DRender *pD3DRender, CNvEncoder *pRemoteDevice, int renderWidth, int renderHeight, bool debugFrameIndex )
: m_pRemoteDevice( pRemoteDevice )
, m_pD3DRender( pD3DRender )
, m_pStagingTexture( NULL )
, m_bExiting( false )
, m_frameIndex(0)
, m_renderWidth(renderWidth)
, m_renderHeight(renderHeight)
, m_frameIndex2(0)
{
m_FrameRender = new FrameRender(renderWidth, renderHeight, debugFrameIndex, pD3DRender);
m_encodeFinished.Set();
}
@ -586,300 +473,17 @@ namespace
{
}
bool CopyToStaging( ID3D11Texture2D *pTexture[], int textureNum, uint64_t presentationTime, uint64_t frameIndex, uint64_t clientTime )
bool CopyToStaging( ID3D11Texture2D *pTexture[], int textureNum, uint64_t presentationTime, uint64_t frameIndex, uint64_t clientTime, const std::string& debugText)
{
// Create a staging texture to copy frame data into that can in turn
// be read back (for blocking until rendering is finished).
if ( !m_pStagingTexture )
{
D3D11_TEXTURE2D_DESC srcDesc;
pTexture[0]->GetDesc( &srcDesc );
D3D11_TEXTURE2D_DESC stagingTextureDesc;
ZeroMemory( &stagingTextureDesc, sizeof( stagingTextureDesc ) );
stagingTextureDesc.Width = m_renderWidth * 2;
stagingTextureDesc.Height = m_renderHeight;
stagingTextureDesc.Format = srcDesc.Format;
stagingTextureDesc.MipLevels = 1;
stagingTextureDesc.ArraySize = 1;
stagingTextureDesc.SampleDesc.Count = 1;
stagingTextureDesc.Usage = D3D11_USAGE_DEFAULT;
//stagingTextureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
stagingTextureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
if ( FAILED( m_pD3DRender->GetDevice()->CreateTexture2D( &stagingTextureDesc, NULL, &m_pStagingTexture ) ) )
{
Log( "Failed to create staging texture!" );
return false;
}
HRESULT hr = m_pD3DRender->GetDevice()->CreateRenderTargetView(m_pStagingTexture.Get(), NULL, &m_pRenderTargetView);
if (FAILED(hr)) {
Log("CreateRenderTargetView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Create depth stencil texture
D3D11_TEXTURE2D_DESC descDepth;
ZeroMemory(&descDepth, sizeof(descDepth));
descDepth.Width = stagingTextureDesc.Width;
descDepth.Height = stagingTextureDesc.Height;
descDepth.MipLevels = 1;
descDepth.ArraySize = 1;
descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
descDepth.SampleDesc.Count = 1;
descDepth.SampleDesc.Quality = 0;
descDepth.Usage = D3D11_USAGE_DEFAULT;
descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
descDepth.CPUAccessFlags = 0;
descDepth.MiscFlags = 0;
hr = m_pD3DRender->GetDevice()->CreateTexture2D(&descDepth, nullptr, &m_pDepthStencil);
if (FAILED(hr)) {
Log("CreateTexture2D %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Create the depth stencil view
D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
ZeroMemory(&descDSV, sizeof(descDSV));
descDSV.Format = descDepth.Format;
descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
descDSV.Texture2D.MipSlice = 0;
hr = m_pD3DRender->GetDevice()->CreateDepthStencilView(m_pDepthStencil.Get(), &descDSV, &m_pDepthStencilView);
if (FAILED(hr)) {
Log("CreateDepthStencilView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
m_pD3DRender->GetContext()->OMSetRenderTargets(1, m_pRenderTargetView.GetAddressOf(), m_pDepthStencilView.Get());
D3D11_VIEWPORT viewport;
viewport.Width = (float)m_renderWidth * 2;
viewport.Height = (float)m_renderHeight;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
viewport.TopLeftX = 0;
viewport.TopLeftY = 0;
m_pD3DRender->GetContext()->RSSetViewports(1, &viewport);
ID3DBlob *vshader, *pshader, *error;
hr = D3DCompile(VERTEX_SHADER, strlen(VERTEX_SHADER), "vs", NULL, NULL, "VS", "vs_4_0", 0, 0, &vshader, &error);
Log("D3DCompile vs %p %s", hr, GetDxErrorStr(hr).c_str());
if (FAILED(hr)) {
Log("%s", error->GetBufferPointer());
return false;
}
if (error != NULL) {
error->Release();
error = NULL;
}
hr = m_pD3DRender->GetDevice()->CreateVertexShader((const DWORD*)vshader->GetBufferPointer(), vshader->GetBufferSize(), NULL, &m_pVertexShader);
if (FAILED(hr)) {
Log("CreateVertexShader %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
hr = D3DCompile(VERTEX_SHADER, strlen(VERTEX_SHADER), "ps", NULL, NULL, "PS", "ps_4_0", 0, 0, &pshader, &error);
Log("D3DCompile ps %p %s", hr, GetDxErrorStr(hr).c_str());
if (FAILED(hr)) {
Log("%s", error->GetBufferPointer());
return false;
}
if (error != NULL) {
error->Release();
}
hr = m_pD3DRender->GetDevice()->CreatePixelShader((const DWORD*)pshader->GetBufferPointer(), pshader->GetBufferSize(), NULL, &m_pPixelShader);
if (FAILED(hr)) {
Log("CreatePixelShader %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Define the input layout
D3D11_INPUT_ELEMENT_DESC layout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
UINT numElements = ARRAYSIZE(layout);
// Create the input layout
hr = m_pD3DRender->GetDevice()->CreateInputLayout(layout, numElements, vshader->GetBufferPointer(),
vshader->GetBufferSize(), &m_pVertexLayout);
if (FAILED(hr)) {
Log("CreateInputLayout %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
vshader->Release();
// Set the input layout
m_pD3DRender->GetContext()->IASetInputLayout(m_pVertexLayout.Get());
// Source textures have 1448x1448 pixels, but the destination texture (remote display) has 1024x1024 pixels.
// Apply an offset to crop the center of the source textures.
float tex_offset = (1448 - 1024) / 2 / 1448.0;
tex_offset = 0; // cropping is currently disabled
// Create vertex buffer
SimpleVertex vertices[] =
{
{ DirectX::XMFLOAT3(-1.0f, -1.0f, 0.5f), DirectX::XMFLOAT2(1.0f - tex_offset, 0.0f + tex_offset) },
{ DirectX::XMFLOAT3( 1.0f, 1.0f, 0.5f), DirectX::XMFLOAT2(0.0f + tex_offset, 1.0f - tex_offset) },
{ DirectX::XMFLOAT3( 1.0f, -1.0f, 0.5f), DirectX::XMFLOAT2(0.0f + tex_offset, 0.0f + tex_offset) },
{ DirectX::XMFLOAT3(-1.0f, 1.0f, 0.5f), DirectX::XMFLOAT2(1.0f - tex_offset, 1.0f - tex_offset) },
};
D3D11_BUFFER_DESC bd;
ZeroMemory(&bd, sizeof(bd));
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(SimpleVertex) * 4;
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
bd.CPUAccessFlags = 0;
D3D11_SUBRESOURCE_DATA InitData;
ZeroMemory(&InitData, sizeof(InitData));
InitData.pSysMem = vertices;
hr = m_pD3DRender->GetDevice()->CreateBuffer(&bd, &InitData, &m_pVertexBuffer);
if (FAILED(hr)) {
Log("CreateBuffer 1 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Set vertex buffer
UINT stride = sizeof(SimpleVertex);
UINT offset = 0;
m_pD3DRender->GetContext()->IASetVertexBuffers(0, 1, m_pVertexBuffer.GetAddressOf(), &stride, &offset);
// Create index buffer
WORD indices[] =
{
0,1,2,
0,3,1
};
bd.Usage = D3D11_USAGE_DEFAULT;
bd.ByteWidth = sizeof(WORD) * 6;
bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
bd.CPUAccessFlags = 0;
InitData.pSysMem = indices;
hr = m_pD3DRender->GetDevice()->CreateBuffer(&bd, &InitData, &m_pIndexBuffer);
if (FAILED(hr)) {
Log("CreateBuffer 2 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
// Set index buffer
m_pD3DRender->GetContext()->IASetIndexBuffer(m_pIndexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0);
// Set primitive topology
m_pD3DRender->GetContext()->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create the sample state
D3D11_SAMPLER_DESC sampDesc;
ZeroMemory(&sampDesc, sizeof(sampDesc));
sampDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
sampDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
sampDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
sampDesc.MinLOD = 0;
sampDesc.MaxLOD = D3D11_FLOAT32_MAX;
hr = m_pD3DRender->GetDevice()->CreateSamplerState(&sampDesc, &m_pSamplerLinear);
if (FAILED(hr)) {
Log("CreateSamplerState 5 %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
Log("Staging Texture created");
}
D3D11_TEXTURE2D_DESC srcDesc;
pTexture[0]->GetDesc(&srcDesc);
Log("CopyToStaging 0 %dx%d %d", srcDesc.Width, srcDesc.Height, srcDesc.Format);
pTexture[1]->GetDesc(&srcDesc);
Log("CopyToStaging 1 %dx%d %d", srcDesc.Width, srcDesc.Height, srcDesc.Format);
m_presentationTime = presentationTime;
m_frameIndex = frameIndex;
m_clientTime = clientTime;
if (textureNum == 1) {
m_pD3DRender->GetContext()->CopyResource( m_pStagingTexture.Get(), pTexture[0] );
}
else {
D3D11_BOX box = { 0 };
box.right = srcDesc.Width;
box.bottom = srcDesc.Height;
box.back = 1;
//m_pD3DRender->GetContext()->CopyResource(m_pStagingTexture, pTexture[1]);
//m_pD3DRender->GetContext()->CopySubresourceRegion(m_pStagingTexture, 0, 0, 0, 0, pTexture[0], 0, 0);
//m_pD3DRender->GetContext()->CopySubresourceRegion(m_pStagingTexture.Get(), 0, 0, 0, 0, pTexture[0], 0, &box);
//m_pD3DRender->GetContext()->CopySubresourceRegion(m_pStagingTexture.Get(), 0, srcDesc.Width, 0, 0, pTexture[1], 0, &box);
m_FrameRender->Startup(pTexture);
m_pD3DRender->GetContext()->Flush();
//m_pD3DRender->GetContext()->Begin(NULL);
// Update our time
static float t = 0.0f;
static ULONGLONG timeStart = 0;
ULONGLONG timeCur = GetTickCount64();
if (timeStart == 0)
timeStart = timeCur;
t = (timeCur - timeStart) / 1000.0f;
float col = (GetTimestampUs() / 1000) / 10 % 256 / 256.0;
D3D11_SHADER_RESOURCE_VIEW_DESC SRVDesc = {};
SRVDesc.Format = srcDesc.Format;
SRVDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
SRVDesc.Texture2D.MostDetailedMip = 0;
SRVDesc.Texture2D.MipLevels = 1;
HRESULT hr = m_pD3DRender->GetDevice()->CreateShaderResourceView(pTexture[0], &SRVDesc, m_pShaderResourceView[0].ReleaseAndGetAddressOf());
if (FAILED(hr)) {
Log("CreateShaderResourceView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
hr = m_pD3DRender->GetDevice()->CreateShaderResourceView(pTexture[1], &SRVDesc, m_pShaderResourceView[1].ReleaseAndGetAddressOf());
if (FAILED(hr)) {
Log("CreateShaderResourceView %p %s", hr, GetDxErrorStr(hr).c_str());
return false;
}
//
// Clear the back buffer
//
float color[4] = { 1.0, col, 1.0, 1.0 };//DirectX::Colors::MidnightBlue
m_pD3DRender->GetContext()->ClearRenderTargetView(m_pRenderTargetView.Get(), color);
//
// Clear the depth buffer to 1.0 (max depth)
//
m_pD3DRender->GetContext()->ClearDepthStencilView(m_pDepthStencilView.Get(), D3D11_CLEAR_DEPTH, 1.0f, 0);
//
// Render the full-screen quad
//
m_pD3DRender->GetContext()->VSSetShader(m_pVertexShader.Get(), nullptr, 0);
m_pD3DRender->GetContext()->PSSetShader(m_pPixelShader.Get(), nullptr, 0);
ID3D11ShaderResourceView *shaderResourceView[2] = { m_pShaderResourceView[0].Get(), m_pShaderResourceView[1].Get() };
m_pD3DRender->GetContext()->PSSetShaderResources(0, 2, shaderResourceView);
//m_pD3DRender->GetContext()->PSSetShaderResources(0, 1, shaderResourceView);
m_pD3DRender->GetContext()->PSSetSamplers(0, 1, m_pSamplerLinear.GetAddressOf());
m_pD3DRender->GetContext()->DrawIndexed(6, 0, 0);
m_pD3DRender->GetContext()->Flush();
}
char buf[200];
snprintf(buf, sizeof(buf), "\nindex2: %llu", m_frameIndex2);
m_FrameRender->RenderFrame(pTexture, textureNum, debugText + buf);
return true;
}
@ -895,11 +499,13 @@ namespace
if ( m_bExiting )
break;
if ( m_pStagingTexture )
if ( m_FrameRender->GetTexture() )
{
m_pRemoteDevice->Transmit( m_pStagingTexture.Get(), m_presentationTime, m_frameIndex, m_clientTime);
m_pRemoteDevice->Transmit(m_FrameRender->GetTexture().Get(), m_presentationTime, m_frameIndex, m_frameIndex2, m_clientTime);
}
m_frameIndex2++;
m_encodeFinished.Set();
}
}
@ -909,6 +515,7 @@ namespace
m_bExiting = true;
m_newFrameReady.Set();
Join();
delete m_FrameRender;
}
void NewFrameReady( double flVsyncTimeInSeconds )
@ -927,36 +534,15 @@ namespace
private:
CThreadEvent m_newFrameReady, m_encodeFinished;
CNvEncoder *m_pRemoteDevice;
CD3DRender *m_pD3DRender;
double m_flVsyncTimeInSeconds;
bool m_bExiting;
uint64_t m_presentationTime;
uint64_t m_frameIndex;
uint64_t m_clientTime;
int m_renderWidth;
int m_renderHeight;
ComPtr<ID3D11Texture2D> m_pStagingTexture;
uint64_t m_frameIndex2;
ComPtr<ID3D11VertexShader> m_pVertexShader;
ComPtr<ID3D11PixelShader> m_pPixelShader;
ComPtr<ID3D11InputLayout> m_pVertexLayout;
ComPtr<ID3D11Buffer> m_pVertexBuffer;
ComPtr<ID3D11Buffer> m_pIndexBuffer;
ComPtr<ID3D11SamplerState> m_pSamplerLinear;
ComPtr<ID3D11Texture2D> m_pDepthStencil;
ComPtr<ID3D11ShaderResourceView> m_pShaderResourceView[2];
ComPtr<ID3D11RenderTargetView> m_pRenderTargetView;
ComPtr<ID3D11DepthStencilView> m_pDepthStencilView;
struct SimpleVertex
{
DirectX::XMFLOAT3 Pos;
DirectX::XMFLOAT2 Tex;
};
FrameRender *m_FrameRender;
};
}
@ -999,12 +585,10 @@ public:
, m_pRemoteDevice(NULL)
, m_pEncoder(NULL)
, m_EncoderOptions("")
, m_DebugTimestamp(false)
, m_Listener(NULL)
, m_VSyncThread(NULL)
, m_poseMutex(NULL)
{
std::string logFile;
std::string host, control_host;
int port, control_port;
@ -1031,12 +615,10 @@ public:
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_EncoderOptions_String, buf, sizeof(buf));
m_EncoderOptions = buf;
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_OutputFile_String, buf, sizeof(buf));
m_OutputFile = buf;
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_LogFile_String, buf, sizeof(buf));
logFile = buf;
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_ReplayFile_String, buf, sizeof(buf));
m_ReplayFile = buf;
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_DebugOutputDir, buf, sizeof(buf));
g_DebugOutputDir = buf;
vr::VRSettings()->GetString(k_pch_Settings_Section, k_pch_Settings_SrtOptions_String, buf, sizeof(buf));
std::string SrtOptions = buf;
@ -1049,12 +631,11 @@ public:
control_host = buf;
control_port = vr::VRSettings()->GetInt32(k_pch_Settings_Section, k_pch_Settings_ControlListenPort_Int32);
m_DebugTimestamp = vr::VRSettings()->GetBool(k_pch_Settings_Section, k_pch_Settings_DebugTimestamp_Bool);
bool DebugTimestamp = vr::VRSettings()->GetBool(k_pch_Settings_Section, k_pch_Settings_DebugTimestamp_Bool);
bool DebugFrameIndex = vr::VRSettings()->GetBool(k_pch_Settings_Section, k_pch_Settings_DebugFrameIndex_Bool);
bool DebugFrameOutput = vr::VRSettings()->GetBool(k_pch_Settings_Section, k_pch_Settings_DebugFrameOutput_Bool);
bool DebugCaptureOutput = vr::VRSettings()->GetBool(k_pch_Settings_Section, k_pch_Settings_DebugCaptureOutput_Bool);
logger = simplelogger::LoggerFactory::CreateFileLogger(logFile);
float originalIPD = vr::VRSettings()->GetFloat(vr::k_pch_SteamVR_Section, vr::k_pch_SteamVR_IPD_Float);
m_flIPD = vr::VRSettings()->GetFloat(k_pch_Settings_Section, k_pch_Settings_IPD_Float);
@ -1069,9 +650,6 @@ public:
Log("driver_null: IPD: %f", m_flIPD);
Log("driver_null: EncoderOptions: %s%s", m_EncoderOptions.c_str(), m_EncoderOptions.size() == sizeof(buf) - 1 ? " (Maybe truncated)" : "");
Log("driver_null: OutputFile: %s%s", m_OutputFile.c_str(), m_OutputFile.size() == sizeof(buf) - 1 ? " (Maybe truncated)" : "");
Log("driver_null: ReplayFile: %s%s", m_ReplayFile.c_str(), m_ReplayFile.size() == sizeof(buf) - 1 ? " (Maybe truncated)" : "");
//CDisplayRedirectLatest()
@ -1168,18 +746,17 @@ public:
m_Listener->Start();
// Spawn our separate process to manage headset presentation.
m_pRemoteDevice = new CNvEncoder(m_pD3DRender);
m_pRemoteDevice = new CNvEncoder(m_pD3DRender, DebugTimestamp, DebugFrameOutput, DebugCaptureOutput);
if (!m_pRemoteDevice->Initialize(
m_EncoderOptions, m_OutputFile, m_ReplayFile, m_Listener,
m_EncoderOptions, m_Listener,
nDisplayX, nDisplayY, nDisplayWidth, nDisplayHeight,
nDisplayRefreshRateNumerator, nDisplayRefreshRateDenominator,
m_DebugTimestamp))
nDisplayRefreshRateNumerator, nDisplayRefreshRateDenominator))
{
return;
}
// Spin up a separate thread to handle the overlapped encoding/transmit step.
m_pEncoder = new CEncoder(m_pD3DRender, m_pRemoteDevice, m_nRenderWidth, m_nRenderHeight);
m_pEncoder = new CEncoder(m_pD3DRender, m_pRemoteDevice, m_nRenderWidth, m_nRenderHeight, DebugFrameIndex);
m_pEncoder->Start();
m_VSyncThread = new VSyncThread();
@ -1426,7 +1003,8 @@ public:
pose.qRotation = HmdQuaternion_Init(1, 0, 0, 0);
if (m_Listener->HasValidTrackingInfo()) {
auto& info = m_Listener->GetTrackingInfo();
Listener::TrackingInfo info;
m_Listener->GetTrackingInfo(info);
uint64_t trackingDelay = GetTimestampUs() - m_Listener->clientToServerTime(info.clientTime);
Log("Tracking elapsed:%lld us %lld quot:%f,%f,%f,%f\nposition:%f,%f,%f\nView[0]:\n%sProj[0]:\n%sView[1]:\n%sProj[1]:\n%s",
@ -1505,7 +1083,7 @@ public:
if (m_unObjectId != vr::k_unTrackedDeviceIndexInvalid)
{
Log("RunFrame");
vr::VRServerDriverHost()->TrackedDevicePoseUpdated(m_unObjectId, GetPose(), sizeof(vr::DriverPose_t));
//vr::VRServerDriverHost()->TrackedDevicePoseUpdated(m_unObjectId, GetPose(), sizeof(vr::DriverPose_t));
}
}
@ -1529,9 +1107,6 @@ private:
float m_flIPD;
std::string m_EncoderOptions;
std::string m_OutputFile;
std::string m_ReplayFile;
bool m_DebugTimestamp;
uint64_t m_LastReferencedFrameIndex;
uint64_t m_LastReferencedClientTime;
@ -1623,7 +1198,7 @@ public:
//Log("[VDispDvr] Flush-End");
// Copy entire texture to staging so we can read the pixels to send to remote device.
m_pEncoder->CopyToStaging(&pTexture, 1, presentationTime, m_LastReferencedFrameIndex, m_LastReferencedClientTime);
m_pEncoder->CopyToStaging(&pTexture, 1, presentationTime, m_LastReferencedFrameIndex, m_LastReferencedClientTime, std::string(""));
//Log("[VDispDvr] Flush-Staging(begin)");
@ -1797,7 +1372,7 @@ public:
/** Used to purge all texture sets for a given process. */
virtual void DestroyAllSwapTextureSets(uint32_t unPid) {
Log("DestroyAllSwapTextureSets");
Log("DestroyAllSwapTextureSets %d", unPid);
for (auto it = m_handleMap.begin(); it != m_handleMap.end();) {
if (it->second.first->pid == unPid) {
@ -1859,15 +1434,29 @@ public:
}
if (minIt != m_poseBuffer.end()) {
// found the frameIndex
m_prevSubmitFrameIndex = m_submitFrameIndex;
m_prevSubmitClientTime = m_submitClientTime;
m_submitFrameIndex = minIt->FrameIndex;
m_submitClientTime = minIt->clientTime;
m_prevFramePoseRotation = m_framePoseRotation;
m_framePoseRotation.x = minIt->HeadPose_Pose_Orientation.x;
m_framePoseRotation.y = minIt->HeadPose_Pose_Orientation.y;
m_framePoseRotation.z = minIt->HeadPose_Pose_Orientation.z;
m_framePoseRotation.w = minIt->HeadPose_Pose_Orientation.w;
}
else {
m_submitFrameIndex = 0;
m_submitClientTime = 0;
m_framePoseRotation = HmdQuaternion_Init(0.0, 0.0, 0.0, 0.0);
}
m_poseMutex.Release();
/*Listener::TrackingInfo info;
m_Listener->GetTrackingInfo(info);
m_submitFrameIndex = info.FrameIndex;
m_submitClientTime = info.clientTime;
m_framePoseRotation.x = info.HeadPose_Pose_Orientation.x;
*/
m_submitTextures[0] = sharedTextureHandles[0];
m_submitTextures[1] = sharedTextureHandles[1];
}
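
This is the SubmitLayer half of the fix: the texture that later reaches Present was rendered against the pose of the previous SubmitLayer call, so the frame index recorded at submit time has to lag by one. A sketch of the pairing, using the member names from the diff:

    // At SubmitLayer: shift the bookkeeping by one frame.
    m_prevSubmitFrameIndex = m_submitFrameIndex; // matches the texture Present sees next
    m_submitFrameIndex = minIt->FrameIndex;      // matches the layer submitted right now
    // At Present: label the encoded frame with the previous submit's index.
    m_pEncoder->CopyToStaging(pTexture, 2, presentationTime,
        m_prevSubmitFrameIndex, m_prevSubmitClientTime, std::string(buf));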
@ -1878,11 +1467,6 @@ public:
Log("[VDispDvr] Waiting for previous encode to finish...");
// Wait for the encoder to be ready. This is important because the encoder thread
// blocks on transmit which uses our shared d3d context (which is not thread safe).
m_pEncoder->WaitForEncode();
Log("[VDispDvr] Done");
ID3D11Texture2D *pSyncTexture = m_pD3DRender->GetSharedTexture((HANDLE)syncTexture);
if (!pSyncTexture)
@ -1926,9 +1510,21 @@ public:
//Log("[VDispDvr] Flush-End");
// Wait for the encoder to be ready. This is important because the encoder thread
// blocks on transmit which uses our shared d3d context (which is not thread safe).
m_pEncoder->WaitForEncode();
Log("[VDispDvr] Done");
// Copy entire texture to staging so we can read the pixels to send to remote device.
Log("FrameIndex diff LastRef: %llu render:%llu diff:%llu", m_LastReferencedFrameIndex, m_submitFrameIndex, m_LastReferencedFrameIndex - m_submitFrameIndex);
m_pEncoder->CopyToStaging(pTexture, 2, presentationTime, m_submitFrameIndex, m_submitClientTime);
Listener::TrackingInfo info;
m_Listener->GetTrackingInfo(info);
char buf[2000];
snprintf(buf, sizeof(buf), "%llu\n%f\n%f", m_prevSubmitFrameIndex, m_prevFramePoseRotation.x, info.HeadPose_Pose_Orientation.x);
m_pEncoder->CopyToStaging(pTexture, 2, presentationTime, m_prevSubmitFrameIndex, m_prevSubmitClientTime, std::string(buf));
//Log("[VDispDvr] Flush-Staging(begin)");
@ -1957,8 +1553,12 @@ private:
vr::SharedTextureHandle_t m_submitTextures[2];
vr::HmdMatrix34_t m_framePose;
vr::HmdQuaternion_t m_prevFramePoseRotation;
vr::HmdQuaternion_t m_framePoseRotation;
uint64_t m_submitFrameIndex;
uint64_t m_submitClientTime;
uint64_t m_prevSubmitFrameIndex;
uint64_t m_prevSubmitClientTime;
};
//-----------------------------------------------------------------------------
@ -2043,3 +1643,12 @@ void *HmdDriverFactory( const char *pInterfaceName, int *pReturnCode )
return NULL;
}
BOOL WINAPI DllMain(HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
switch (dwReason) {
case DLL_PROCESS_ATTACH:
g_hInstance = hInstance;
}
return TRUE;
}
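The g_hInstance captured above is plausibly used to locate the embedded font added as IDR_FONT in this commit (for the debug frame-index overlay); a sketch under that assumption. The resource type RT_RCDATA and the loader name are guesses, since resource.rc itself is not shown:
#include <windows.h>

extern HINSTANCE g_hInstance;
#define IDR_FONT 1000   // matches resource.h added in this commit

// Hypothetical loader for the embedded binary resource.
bool LoadEmbeddedFont(const void **outData, DWORD *outSize) {
    HRSRC res = FindResource(g_hInstance, MAKEINTRESOURCE(IDR_FONT), RT_RCDATA);
    if (!res) return false;
    HGLOBAL handle = LoadResource(g_hInstance, res);
    if (!handle) return false;
    *outData = LockResource(handle);
    *outSize = SizeofResource(g_hInstance, res);
    return *outData != nullptr && *outSize != 0;
}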


@ -94,7 +94,7 @@
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalDependencies>C:\src\virtual_display\x64-libs\libeay32.lib;C:\src\virtual_display\x64-libs\ssleay32.lib;C:\src\virtual_display\x64-libs\pthread_lib.lib;C:\src\virtual_display\x64-libs\srt_static.lib;d3dcompiler.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>C:\src\virtual_display\x64-libs\libeay32.lib;C:\src\virtual_display\x64-libs\ssleay32.lib;C:\src\virtual_display\x64-libs\pthread_lib.lib;C:\src\virtual_display\x64-libs\srt_static.lib;ws2_32.lib;d3dcompiler.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<CustomBuildStep>
<Command>
@ -139,7 +139,7 @@
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
<AdditionalDependencies>C:\src\virtual_display\x64-libs\libeay32.lib;C:\src\virtual_display\x64-libs\ssleay32.lib;C:\src\virtual_display\x64-libs\pthread_lib.lib;C:\src\virtual_display\x64-libs\srt_static.lib;d3dcompiler.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>C:\src\virtual_display\x64-libs\libeay32.lib;C:\src\virtual_display\x64-libs\ssleay32.lib;C:\src\virtual_display\x64-libs\pthread_lib.lib;C:\src\virtual_display\x64-libs\srt_static.lib;d3dcompiler.lib;ws2_32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<CustomBuildStep>
<Command>
@ -152,6 +152,7 @@
<ItemGroup>
<ClCompile Include="ControlSocket.cpp" />
<ClCompile Include="driver_virtual_display.cpp" />
<ClCompile Include="FrameRender.cpp" />
<ClCompile Include="NvEncoder.cpp" />
<ClCompile Include="NvEncoderD3D11.cpp" />
<ClCompile Include="Poller.cpp" />
@ -165,6 +166,7 @@
</ItemGroup>
<ItemGroup>
<ClInclude Include="ControlSocket.h" />
<ClInclude Include="FrameRender.h" />
<ClInclude Include="ISocket.h" />
<ClInclude Include="Listener.h" />
<ClInclude Include="Logger.h" />
@ -174,6 +176,7 @@
<ClInclude Include="nvencoderclioptions.h" />
<ClInclude Include="NvEncoderD3D11.h" />
<ClInclude Include="Poller.h" />
<ClInclude Include="resource.h" />
<ClInclude Include="SrtSocket.h" />
<ClInclude Include="UdpSocket.h" />
<ClInclude Include="UdpSender.h" />
@ -182,6 +185,9 @@
<ItemGroup>
<None Include="packages.config" />
</ItemGroup>
<ItemGroup>
<ResourceCompile Include="resource.rc" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
<Import Project="..\packages\directxtk_desktop_2015.2018.4.23.1\build\native\directxtk_desktop_2015.targets" Condition="Exists('..\packages\directxtk_desktop_2015.2018.4.23.1\build\native\directxtk_desktop_2015.targets')" />
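ws2_32.lib, added to both configurations above, is the Winsock import library; anything that calls WSAStartup/socket/sendto (presumably the project's UDP listener and sender) needs it at link time. A minimal self-contained illustration:
#include <winsock2.h>
#include <cstdio>
#pragma comment(lib, "ws2_32.lib")   // same dependency the .vcxproj now lists

int main() {
    WSADATA wsa;
    if (WSAStartup(MAKEWORD(2, 2), &wsa) != 0) {
        std::printf("WSAStartup failed\n");
        return 1;
    }
    SOCKET s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);  // UDP, as used for streaming
    if (s != INVALID_SOCKET) {
        closesocket(s);
    }
    WSACleanup();
    return 0;
}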


@ -19,8 +19,6 @@
#include <cstring>
#include "Logger.h"
extern simplelogger::Logger *logger;
#ifndef _WIN32
#include <cstring>
inline bool operator==(const GUID &guid1, const GUID &guid2) {
@ -244,8 +242,8 @@ public:
}
funcInit(pParams);
LOG(INFO) << NvEncoderInitParam().MainParamToString(pParams);
LOG(TRACE) << NvEncoderInitParam().FullParamToString(pParams);
//LOG(INFO) << NvEncoderInitParam().MainParamToString(pParams);
//LOG(TRACE) << NvEncoderInitParam().FullParamToString(pParams);
}
private:
@ -254,7 +252,7 @@ private:
std::vector<std::string> vstrValueName = split(strValueNames, ' ');
auto it = std::find(vstrValueName.begin(), vstrValueName.end(), strValue);
if (it == vstrValueName.end()) {
LOG(ERROR) << strName << " options: " << strValueNames;
//LOG(ERROR) << strName << " options: " << strValueNames;
return false;
}
*pValue = vValue[it - vstrValueName.begin()];
@ -264,7 +262,7 @@ private:
std::string ConvertValueToString(const std::vector<T> &vValue, const std::string &strValueNames, T value) {
auto it = std::find(vValue.begin(), vValue.end(), value);
if (it == vValue.end()) {
LOG(ERROR) << "Invalid value. Can't convert to one of " << strValueNames;
//LOG(ERROR) << "Invalid value. Can't convert to one of " << strValueNames;
return std::string();
}
return split(strValueNames, ' ')[it - vValue.begin()];
@ -275,7 +273,7 @@ private:
double r = std::stod(strValue, &l);
char c = strValue[l];
if (c != 0 && c != 'k' && c != 'm') {
LOG(ERROR) << strName << " units: 1, K, M (lower case also allowed)";
//LOG(ERROR) << strName << " units: 1, K, M (lower case also allowed)";
}
*pBitRate = (unsigned)((c == 'm' ? 1000000 : (c == 'k' ? 1000 : 1)) * r);
} catch (std::invalid_argument) {
@ -288,7 +286,7 @@ private:
try {
*pInt = std::stoi(strValue);
} catch (std::invalid_argument) {
LOG(ERROR) << strName << " need a value of positive number";
//LOG(ERROR) << strName << " need a value of positive number";
return false;
}
return true;
@ -302,7 +300,7 @@ private:
} else if (vQp.size() == 3) {
*pQp = {(unsigned)std::stoi(vQp[0]), (unsigned)std::stoi(vQp[1]), (unsigned)std::stoi(vQp[2])};
} else {
LOG(ERROR) << strName << " qp_for_P_B_I or qp_P,qp_B,qp_I (no space is allowed)";
//LOG(ERROR) << strName << " qp_for_P_B_I or qp_P,qp_B,qp_I (no space is allowed)";
return false;
}
} catch (std::invalid_argument) {

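The bitrate parsing visible above accepts k/m suffixes ("20M", "30M" appear in the nvencOptions config later in this commit); a standalone version for clarity, with an illustrative function name:
#include <stdexcept>
#include <string>

// "10M" -> 10000000, "500k" -> 500000, "800000" -> 800000.
unsigned ParseBitRateString(const std::string &strValue) {
    size_t l = 0;
    double r = std::stod(strValue, &l);          // throws std::invalid_argument on junk
    char c = l < strValue.size() ? strValue[l] : 0;
    if (c == 'M' || c == 'K') c += 'a' - 'A';    // accept upper case suffixes too
    return (unsigned)((c == 'm' ? 1000000.0 : (c == 'k' ? 1000.0 : 1.0)) * r);
}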

@ -0,0 +1 @@
#define IDR_FONT 1000

Binary file not shown.


@ -1,7 +1,7 @@
{
"driver_remote_glass": {
"serialNumber": "VD-001",
"modelNumber": "Virtual Display",
"serialNumber": "RG-001",
"modelNumber": "Remote Glass",
"additionalLatencyInSeconds": 0.008,
"displayWidth": 2048,
"displayHeight": 1024,
@ -18,15 +18,15 @@
"IPD": 0.064,
"secondsFromVsyncToPhotons" : 0.005, // 5ms
"displayFrequency" : 60,
"nvencOptions": "-codec h264 -preset ll_hq -rc cbr_ll_hq -bitrate 10M -gop 240 -maxbitrate 10M -fps 60",
"outputFile": "C:\\src\\virtual_display\\test.h264",
//"replayFile": "C:\\src\\virtual_display\\replay.dump",
"logFile": "C:\\src\\virtual_display\\driver.log",
"nvencOptions": "-codec h264 -preset ll_hq -rc cbr_ll_hq -bitrate 20M -gop 240 -maxbitrate 30M -fps 60",
"listenPort": 9944,
"listenHost": "0.0.0.0",
"controlListenPort": 9944,
"controlListenHost": "127.0.0.1",
"debugTimestamp": false,
//"debugFrameIndex": true,
//"debugFrameOutput": true,
//"debugCaptureOutput": true,
"debugOutputDir": "C:\\src\\virtual_display",
"srtOptions": "TSBPDDELAY=1 RCVLATENCY=1"
}
}

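Illustrative only: the keys above map onto driver settings roughly like the struct below; the actual loader and member types are not part of this diff:
#include <cstdint>
#include <string>

struct DriverSettings {
    std::string serialNumber = "RG-001";
    std::string modelNumber  = "Remote Glass";
    double additionalLatencyInSeconds = 0.008;
    int displayWidth  = 2048;
    int displayHeight = 1024;
    double IPD = 0.064;
    double secondsFromVsyncToPhotons = 0.005;   // 5ms
    int displayFrequency = 60;
    std::string nvencOptions =
        "-codec h264 -preset ll_hq -rc cbr_ll_hq -bitrate 20M -gop 240 "
        "-maxbitrate 30M -fps 60";
    uint16_t listenPort = 9944;
    std::string listenHost = "0.0.0.0";
    uint16_t controlListenPort = 9944;
    std::string controlListenHost = "127.0.0.1";
    bool debugTimestamp = false;
    std::string debugOutputDir = "C:\\src\\virtual_display";
    std::string srtOptions = "TSBPDDELAY=1 RCVLATENCY=1";
};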
test/webvr/index.html

@ -0,0 +1,312 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<!-- Origin Trial Token, feature = WebVR (For Chrome M62+), origin = https://webvr.info, expires = 2018-04-19 -->
<meta http-equiv="origin-trial" data-feature="WebVR (For Chrome M62+)" data-expires="2018-04-19" content="AlKfR1/dWYw2VkHcZUgtb3dn2PNpMVc4mCdRRupY+mP0tZsePGXFJbH/QRWnayZ+AQAqKLxmsmXTKReYPWxbwgcAAABPeyJvcmlnaW4iOiJodHRwczovL3dlYnZyLmluZm86NDQzIiwiZmVhdHVyZSI6IldlYlZSMS4xTTYyIiwiZXhwaXJ5IjoxNTI0MTE2NjAxfQ==">
<title>04 - Simple Mirroring</title>
<!--
This sample demonstrates how to mirror content to an external display
while presenting to a VRDisplay.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
var polyfill = new WebVRPolyfill({
// Ensures the polyfill is always active on mobile, due to providing
// a polyfilled CardboardVRDisplay when no native API is available,
// and also polyfilling even when the native API is available, due to
// providing a CardboardVRDisplay when no native VRDisplays exist.
PROVIDE_MOBILE_VRDISPLAY: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
});
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup.
var gl = null;
var cubeSea = null;
var stats = null;
function onContextLost( event ) {
event.preventDefault();
console.log( 'WebGL Context Lost.' );
gl = null;
cubeSea = null;
stats = null;
}
function onContextRestored( event ) {
console.log( 'WebGL Context Restored.' );
initWebGL(vrDisplay ? vrDisplay.capabilities.hasExternalDisplay : false);
}
var webglCanvas = document.getElementById("webgl-canvas");
webglCanvas.addEventListener( 'webglcontextlost', onContextLost, false );
webglCanvas.addEventListener( 'webglcontextrestored', onContextRestored, false );
function initWebGL (preserveDrawingBuffer) {
// Setting preserveDrawingBuffer to true prevents the canvas from being
// implicitly cleared when calling submitFrame or compositing the canvas
// on the document. For the simplest form of mirroring we want to create
// the canvas with that option enabled. Note that this may incur a
// performance penalty, as it may imply that additional copies of the
// canvas backbuffer need to be made. As a result, we ONLY want to set
// that if we know the VRDisplay has an external display, which is why
// we defer WebGL initialization until after we've gotten results back
// from navigator.getVRDisplays and know which device we'll be
// presenting with.
var glAttribs = {
alpha: false,
preserveDrawingBuffer: preserveDrawingBuffer
};
var useWebgl2 = WGLUUrl.getBool('webgl2', false);
var contextTypes = useWebgl2 ? ["webgl2"] : ["webgl", "experimental-webgl"];
for (var i in contextTypes) {
gl = webglCanvas.getContext(contextTypes[i], glAttribs);
if (gl)
break;
}
if (!gl) {
var webglType = (useWebgl2 ? "WebGL 2" : "WebGL")
VRSamplesUtil.addError("Your browser does not support " + webglType + ".");
return;
}
gl.clearColor(1.0, 1.0, 0.8, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
var enablePerformanceMonitoring = WGLUUrl.getBool(
'enablePerformanceMonitoring', false);
stats = new WGLUStats(gl, enablePerformanceMonitoring);
// Wait until we have a WebGL context to resize and start rendering.
window.addEventListener("resize", onResize, false);
onResize();
window.requestAnimationFrame(onAnimationFrame);
}
function onVRRequestPresent () {
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
}, function (err) {
var errMsg = "requestPresent failed.";
if (err && err.message) {
errMsg += "<br/>" + err.message
}
VRSamplesUtil.addError(errMsg, 2000);
});
}
function onVRExitPresent () {
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
}, function () {
VRSamplesUtil.addError("exitPresent failed.", 2000);
});
}
function onVRPresentChange () {
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
}
} else {
if (vrDisplay.capabilities.hasExternalDisplay) {
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.getVRDisplays) {
frameData = new VRFrameData();
navigator.getVRDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[displays.length - 1];
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
// For the benefit of automated testing. Safe to ignore.
if (vrDisplay.capabilities.canPresent && WGLUUrl.getBool('canvasClickPresents', false))
webglCanvas.addEventListener("click", onVRRequestPresent, false);
window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
window.addEventListener('vrdisplayactivate', onVRRequestPresent, false);
window.addEventListener('vrdisplaydeactivate', onVRExitPresent, false);
// Only use preserveDrawingBuffer if we have an external display to
// mirror to.
initWebGL(vrDisplay.capabilities.hasExternalDisplay);
} else {
initWebGL(false);
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
}, function () {
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
});
} else if (navigator.getVRDevices) {
initWebGL(false);
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
// No VR means no mirroring, so create WebGL content without
// preserveDrawingBuffer
initWebGL(false);
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
// recommended by the VRDevice, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = webglCanvas.offsetWidth * window.devicePixelRatio;
webglCanvas.height = webglCanvas.offsetHeight * window.devicePixelRatio;
}
}
function onAnimationFrame (t) {
// do not attempt to render if there is no available WebGL context
if (!gl || !stats || !cubeSea) {
return;
}
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
vrDisplay.requestAnimationFrame(onAnimationFrame);
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
// Column-major projection override: symmetric 90-degree FOV, aspect 1,
// near plane 0.1, far plane at infinity (third column -1,-1; z term -0.2 = -2 * near).
var pp = [
1.00000, 0.00000, 0.00000, 0.00000,
0.00000, 1.00000, 0.00000, 0.00000,
0.00000, 0.00000, -1.00000, -1.00000,
0.00000, 0.00000, -0.20000, 0.00000];
var p = new Float32Array(16);
p.set(pp);
// window.test is truthy here, so both eyes always render with the
// hardcoded projection p instead of the matrices reported by the VRDisplay.
window.test = 1;
if (!test) {
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, null, t, frameData.pose.orientation);
} else {
cubeSea.render(p, frameData.leftViewMatrix, null, t, frameData.pose.orientation);
}
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
if (!test) {
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, null, t, frameData.pose.orientation);
} else {
cubeSea.render(p, frameData.rightViewMatrix, null, t, frameData.pose.orientation);
}
vrDisplay.submitFrame();
} else {
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, null, t);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, null, t);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>


@ -0,0 +1,389 @@
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<!-- No Origin Trial tokens, this page is intended to be viewed via HTTP -->
<title>TEST - Insecure WebVR</title>
<!--
This sample demonstrates using WebVR on an insecure page
-->
<style>
#webgl-canvas, #presenting-message {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
#presenting-message {
color: white;
font-family: sans-serif;
font-size: 2em;
font-weight: bold;
z-index: 1;
text-align: center;
padding: 0.5em;
background-color: #444;
display: none;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Ensures the polyfill is always active when initialized, even if the
// native API is available. This is probably NOT what most pages want.
ALWAYS_APPEND_POLYFILL_DISPLAY: true,
// Polyfill optimizations
DIRTY_SUBMIT_FRAME_BINDINGS: true,
BUFFER_SCALE: 0.75,
};
</script>
<script src="../js/third-party/webvr-polyfill.js"></script>
<script src="../js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
if (WGLUUrl.getBool('polyfill', false)) {
InitializeWebVRPolyfill();
} else {
// Shim for migration from older version of WebVR. Shouldn't be necessary for very long.
InitializeSpecShim();
}
</script>
<!-- End sample polyfill enabling logic -->
<script src="../js/third-party/gl-matrix-min.js"></script>
<script src="../js/third-party/wglu/wglu-program.js"></script>
<script src="../js/third-party/wglu/wglu-stats.js"></script>
<script src="../js/third-party/wglu/wglu-texture.js"></script>
<script src="../js/vr-cube-sea.js"></script>
<script src="../js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<div id="presenting-message">Put on your headset now</div>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
var vrDisplay = null;
var frameData = null;
var projectionMat = mat4.create();
var viewMat = mat4.create();
var vrPresentButton = null;
var presentingMessage = document.getElementById("presenting-message");
// ===================================================
// WebGL scene setup. This code is not WebVR specific.
// ===================================================
// WebGL setup.
var gl = null;
var cubeSea = null;
var stats = null;
function onContextLost( event ) {
event.preventDefault();
console.log( 'WebGL Context Lost.' );
gl = null;
cubeSea = null;
stats = null;
}
function onContextRestored( event ) {
console.log( 'WebGL Context Restored.' );
initWebGL();
}
var webglCanvas = document.getElementById("webgl-canvas");
webglCanvas.addEventListener( 'webglcontextlost', onContextLost, false );
webglCanvas.addEventListener( 'webglcontextrestored', onContextRestored, false );
function initWebGL() {
var glAttribs = {
alpha: false,
};
var useWebgl2 = WGLUUrl.getBool('webgl2', false);
var contextTypes = useWebgl2 ? ["webgl2"] : ["webgl", "experimental-webgl"];
for (var i in contextTypes) {
gl = webglCanvas.getContext(contextTypes[i], glAttribs);
if (gl)
break;
}
if (!gl) {
var webglType = (useWebgl2 ? "WebGL 2" : "WebGL")
VRSamplesUtil.addError("Your browser does not support " + webglType + ".");
return;
}
gl.clearColor(0.1, 0.2, 0.3, 1.0);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
var textureLoader = new WGLUTextureLoader(gl);
var texture = textureLoader.loadTexture("../media/textures/cube-sea.png");
cubeSea = new VRCubeSea(gl, texture);
var enablePerformanceMonitoring = WGLUUrl.getBool(
'enablePerformanceMonitoring', false);
stats = new WGLUStats(gl, enablePerformanceMonitoring);
}
initWebGL();
// ================================
// WebVR-specific code begins here.
// ================================
function onVRRequestPresent () {
// This can only be called in response to a user gesture.
vrDisplay.requestPresent([{ source: webglCanvas }]).then(function () {
// Nothing to do because we're handling things in onVRPresentChange.
}, function (err) {
var errMsg = "requestPresent failed.";
if (err && err.message) {
errMsg += "<br/>" + err.message
}
VRSamplesUtil.addError(errMsg, 2000);
});
}
function onVRExitPresent () {
// No sense in exiting presentation if we're not actually presenting.
// (This may happen if we get an event like vrdisplaydeactivate when
// we weren't presenting.)
if (!vrDisplay.isPresenting)
return;
vrDisplay.exitPresent().then(function () {
// Nothing to do because we're handling things in onVRPresentChange.
}, function (err) {
var errMsg = "exitPresent failed.";
if (err && err.message) {
errMsg += "<br/>" + err.message
}
VRSamplesUtil.addError(errMsg, 2000);
});
}
function onVRPresentChange () {
// When we begin or end presenting, the canvas should be resized to the
// recommended dimensions for the display.
onResize();
if (vrDisplay.isPresenting) {
if (vrDisplay.capabilities.hasExternalDisplay) {
// Because we're not mirroring, any images on an external screen will
// freeze while presenting. It's better to replace it with a message
// indicating that content is being shown on the VRDisplay.
presentingMessage.style.display = "block";
// On devices with an external display the UA may not provide a way
// to exit VR presentation mode, so we should provide one ourselves.
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "../media/icons/cardboard64.png", onVRExitPresent);
}
} else {
// If we have an external display take down the presenting message and
// change the button back to "Enter VR".
if (vrDisplay.capabilities.hasExternalDisplay) {
presentingMessage.style.display = "";
VRSamplesUtil.removeButton(vrPresentButton);
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "../media/icons/cardboard64.png", onVRRequestPresent);
}
}
}
if (navigator.getVRDisplays) {
frameData = new VRFrameData();
navigator.getVRDisplays().then(function (displays) {
if (displays.length > 0) {
vrDisplay = displays[displays.length - 1];
// It's highly recommended that you set the near and far planes to
// something appropriate for your scene so the projection matrices
// WebVR produces have a well-scaled depth buffer.
vrDisplay.depthNear = 0.1;
vrDisplay.depthFar = 1024.0;
// Generally, you want to wait until VR support is confirmed and
// you know the user has a VRDisplay capable of presenting connected
// before adding UI that advertises VR features.
if (vrDisplay.capabilities.canPresent)
vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "../media/icons/cardboard64.png", onVRRequestPresent);
// For the benefit of automated testing. Safe to ignore.
if (vrDisplay.capabilities.canPresent && WGLUUrl.getBool('canvasClickPresents', false))
webglCanvas.addEventListener("click", onVRRequestPresent, false);
// The UA may kick us out of VR present mode for any reason, so to
// ensure we always know when we begin/end presenting we need to
// listen for vrdisplaypresentchange events.
window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
// These events fire when the user agent has had some indication that
// it would be appropriate to enter or exit VR presentation mode, such
// as the user putting on a headset and triggering a proximity sensor.
// You can inspect the `reason` property of the event to learn why the
// event was fired, but in this case we're going to always trust the
// event and enter or exit VR presentation mode when asked.
window.addEventListener('vrdisplayactivate', onVRRequestPresent, false);
window.addEventListener('vrdisplaydeactivate', onVRExitPresent, false);
} else {
VRSamplesUtil.addInfo("WebVR supported, but no VRDisplays found.", 3000);
}
}, function () {
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
});
} else if (navigator.getVRDevices) {
VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
function onResize () {
if (vrDisplay && vrDisplay.isPresenting) {
// If we're presenting we want to use the drawing buffer size
// recommended by the VRDevice, since that will ensure the best
// results post-distortion.
var leftEye = vrDisplay.getEyeParameters("left");
var rightEye = vrDisplay.getEyeParameters("right");
// For simplicity we're going to render both eyes at the same size,
// even if one eye needs less resolution. You can render each eye at
// the exact size it needs, but you'll need to adjust the viewports to
// account for that.
webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
} else {
// We only want to change the size of the canvas drawing buffer to
// match the window dimensions when we're not presenting.
webglCanvas.width = webglCanvas.offsetWidth * window.devicePixelRatio;
webglCanvas.height = webglCanvas.offsetHeight * window.devicePixelRatio;
}
}
window.addEventListener("resize", onResize, false);
onResize();
// Listen for click events on the canvas, which may come from something
// like a Cardboard viewer or other VR controller, and make a small change
// to the scene in response (so that we know it's working.) This basic
// interaction mode is the baseline for all WebVR compatible devices, and
// should ideally always be minimally supported.
function onClick () {
// Reset the background color to a random value
if (gl) {
gl.clearColor(
Math.random() * 0.5,
Math.random() * 0.5,
Math.random() * 0.5, 1.0);
}
}
// Register for mouse restricted events while in VR
// (e.g. mouse no longer available on desktop 2D view)
function onDisplayPointerRestricted() {
if (webglCanvas && webglCanvas.requestPointerLock) {
webglCanvas.requestPointerLock();
}
}
// Register for mouse unrestricted events while in VR
// (e.g. mouse once again available on desktop 2D view)
function onDisplayPointerUnrestricted() {
var lock = document.pointerLockElement;
if (lock && lock === webglCanvas && document.exitPointerLock) {
document.exitPointerLock();
}
}
webglCanvas.addEventListener("click", onClick, false);
window.addEventListener('vrdisplaypointerrestricted', onDisplayPointerRestricted);
window.addEventListener('vrdisplaypointerunrestricted', onDisplayPointerUnrestricted);
function onAnimationFrame (t) {
// do not attempt to render if there is no available WebGL context
if (!gl || !stats || !cubeSea) {
return;
}
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
// When presenting content to the VRDisplay we want to update at its
// refresh rate if it differs from the refresh rate of the main
// display. Calling VRDisplay.requestAnimationFrame ensures we render
// at the right speed for VR.
vrDisplay.requestAnimationFrame(onAnimationFrame);
// As a general rule you want to get the pose as late as possible
// and call VRDisplay.submitFrame as early as possible after
// retrieving the pose. Do any work for the frame that doesn't need
// to know the pose earlier to ensure the lowest latency possible.
//var pose = vrDisplay.getPose();
vrDisplay.getFrameData(frameData);
if (vrDisplay.isPresenting) {
// When presenting render a stereo view.
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.leftProjectionMatrix, frameData.leftViewMatrix, stats, t);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
cubeSea.render(frameData.rightProjectionMatrix, frameData.rightViewMatrix, stats, t);
// If we're currently presenting to the VRDisplay we need to
// explicitly indicate we're done rendering.
vrDisplay.submitFrame();
} else {
// When not presenting render a mono view that still takes pose into
// account.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
// It's best to use our own projection matrix in this case, but we can use the left eye's view matrix
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
cubeSea.render(projectionMat, frameData.leftViewMatrix, stats, t);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats, t);
stats.renderOrtho();
}
stats.end();
}
window.requestAnimationFrame(onAnimationFrame);
})();
</script>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -0,0 +1,385 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
var WGLUDebugGeometry = (function() {
"use strict";
var debugGeomVS = [
"uniform mat4 projectionMat;",
"uniform mat4 viewMat;",
"uniform mat4 modelMat;",
"uniform mat3 normalMat;",
"attribute vec3 position;",
"attribute vec3 normal;",
"varying vec3 v_normal;",
"void main() {",
" gl_Position = projectionMat * viewMat * modelMat * vec4( position, 1.0 );",
" v_normal = normalMat * normal;",
"}",
].join("\n");
// Simple shading with a single light source, this uses half-lambert
// shading to keep things from getting too dark in the unlit areas.
// It's not physically based but looks ok.
var debugGeomFS = [
"precision mediump float;",
"uniform vec4 color;",
"uniform vec3 light;",
"varying vec3 v_normal;",
"void main() {",
" vec3 normal = normalize(v_normal);",
" gl_FragColor = ((dot(light, normal) * 0.5 + 0.5) * 0.8 + 0.2) * color;",
"}",
].join("\n");
var DebugGeometry = function(gl) {
this.gl = gl;
this.projMat = mat4.create();
this.viewMat = mat4.create();
this.modelMat = mat4.create();
this.normalMat = mat3.create();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(debugGeomVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(debugGeomFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({ position: 0 });
this.program.bindAttribLocation({ normal: 1 });
this.program.link();
var verts = [];
var indices = [];
//
// Cube Geometry
//
this.cubeIndexOffset = indices.length;
var size = 0.5;
// Bottom
var idx = verts.length / 6.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, -size, 0, -1, 0);
verts.push(+size, -size, -size, 0, -1, 0);
verts.push(+size, -size, +size, 0, -1, 0);
verts.push(-size, -size, +size, 0, -1, 0);
// Top
idx = verts.length / 6.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, +size, -size, 0, 1, 0);
verts.push(+size, +size, -size, 0, 1, 0);
verts.push(+size, +size, +size, 0, 1, 0);
verts.push(-size, +size, +size, 0, 1, 0);
// Left
idx = verts.length / 6.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size, -1, 0, 0);
verts.push(-size, +size, -size, -1, 0, 0);
verts.push(-size, +size, +size, -1, 0, 0);
verts.push(-size, -size, +size, -1, 0, 0);
// Right
idx = verts.length / 6.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(+size, -size, -size, 1, 0, 0);
verts.push(+size, +size, -size, 1, 0, 0);
verts.push(+size, +size, +size, 1, 0, 0);
verts.push(+size, -size, +size, 1, 0, 0);
// Back
idx = verts.length / 6.0;
indices.push(idx, idx+2, idx+1);
indices.push(idx, idx+3, idx+2);
verts.push(-size, -size, -size, 0, 0, -1);
verts.push(+size, -size, -size, 0, 0, -1);
verts.push(+size, +size, -size, 0, 0, -1);
verts.push(-size, +size, -size, 0, 0, -1);
// Front
idx = verts.length / 6.0;
indices.push(idx, idx+1, idx+2);
indices.push(idx, idx+2, idx+3);
verts.push(-size, -size, +size, 0, 0, 1);
verts.push(+size, -size, +size, 0, 0, 1);
verts.push(+size, +size, +size, 0, 0, 1);
verts.push(-size, +size, +size, 0, 0, 1);
this.cubeIndexCount = indices.length - this.cubeIndexOffset;
//
// Cone Geometry
//
this.coneIndexOffset = indices.length;
var size = 0.5;
var conePointVertex = verts.length / 6.0;
var coneBaseVertex = conePointVertex+1;
var coneSegments = 64;
// Cone side vertices
for (var i = 0; i < coneSegments; ++i) {
idx = verts.length / 6.0;
indices.push(idx, idx + 1, idx + 2);
var rad = ((Math.PI * 2) / coneSegments) * i;
var rad2 = ((Math.PI * 2) / coneSegments) * (i + 1);
verts.push(Math.sin(rad) * (size / 2), -size, Math.cos(rad) * (size / 2),
Math.sin(rad), 0.25, Math.cos(rad));
verts.push(Math.sin(rad2) * (size / 2), -size, Math.cos(rad2) * (size / 2),
Math.sin(rad2), 0.25, Math.cos(rad2));
verts.push(0, size, 0,
Math.sin((rad + rad2) / 2), 0.25, Math.cos((rad + rad2) / 2));
}
// Base triangles
var baseCenterIdx = verts.length / 6.0;
verts.push(0, -size, 0, 0, -1, 0);
for (var i = 0; i < coneSegments; ++i) {
idx = verts.length / 6.0;
indices.push(baseCenterIdx, idx, idx + 1);
var rad = ((Math.PI * 2) / coneSegments) * i;
var rad2 = ((Math.PI * 2) / coneSegments) * (i + 1);
verts.push(Math.sin(rad2) * (size / 2.0), -size, Math.cos(rad2) * (size / 2.0), 0, -1, 0);
verts.push(Math.sin(rad) * (size / 2.0), -size, Math.cos(rad) * (size / 2.0), 0, -1, 0);
}
this.coneIndexCount = indices.length - this.coneIndexOffset;
//
// Rect geometry
//
this.rectIndexOffset = indices.length;
idx = verts.length / 6.0;
indices.push(idx, idx+1, idx+2, idx+3, idx);
verts.push(0, 0, 0, 0, 0, -1);
verts.push(1, 0, 0, 0, 0, -1);
verts.push(1, 1, 0, 0, 0, -1);
verts.push(0, 1, 0, 0, 0, -1);
this.rectIndexCount = indices.length - this.rectIndexOffset;
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
DebugGeometry.prototype.bind = function(projectionMat, viewMat) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.viewMat, false, viewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 24, 0);
gl.vertexAttribPointer(program.attrib.normal, 3, gl.FLOAT, false, 24, 12);
};
DebugGeometry.prototype.bindOrtho = function() {
mat4.ortho(this.projMat, 0, this.gl.canvas.width, this.gl.canvas.height, 0, 0.1, 1024);
mat4.identity(this.viewMat);
this.bind(this.projMat, this.viewMat);
};
DebugGeometry.prototype._bindUniformsRaw = function(model, color, light) {
if (!color) { color = [1, 0, 0, 1]; }
if (!light) { light = [0.75, 0.5, 1.0]; } // Should match vr-cube-sea.js
var lightVec = vec3.fromValues(light[0], light[1], light[2]);
vec3.normalize(lightVec, lightVec);
mat3.normalFromMat4(this.normalMat, model);
this.gl.uniformMatrix4fv(this.program.uniform.modelMat, false, model);
this.gl.uniformMatrix3fv(this.program.uniform.normalMat, false, this.normalMat);
this.gl.uniform4fv(this.program.uniform.color, color);
this.gl.uniform3fv(this.program.uniform.light, lightVec);
};
DebugGeometry.prototype._bindUniforms = function(orientation, position, scale, color, light) {
if (!position) { position = [0, 0, 0]; }
if (!orientation) { orientation = [0, 0, 0, 1]; }
if (!scale) { scale = [1, 1, 1]; }
mat4.fromRotationTranslationScale(this.modelMat, orientation, position, scale);
this._bindUniformsRaw(this.modelMat, color, light);
};
DebugGeometry.prototype.drawCube = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBox = function(orientation, position, scale, color) {
var gl = this.gl;
this._bindUniforms(orientation, position, scale, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawBoxWithMatrix = function(mat, color) {
var gl = this.gl;
this._bindUniformsRaw(mat, color);
gl.drawElements(gl.TRIANGLES, this.cubeIndexCount, gl.UNSIGNED_SHORT, this.cubeIndexOffset * 2.0);
};
DebugGeometry.prototype.drawRect = function(x, y, width, height, color) {
var gl = this.gl;
this._bindUniforms(null, [x, y, -1], [width, height, 1], color);
gl.drawElements(gl.LINE_STRIP, this.rectIndexCount, gl.UNSIGNED_SHORT, this.rectIndexOffset * 2.0);
};
DebugGeometry.prototype.drawCone = function(orientation, position, size, color) {
var gl = this.gl;
if (!size) { size = 1; }
this._bindUniforms(orientation, position, [size, size, size], color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
DebugGeometry.prototype.drawConeWithMatrix = function(mat, color) {
var gl = this.gl;
this._bindUniformsRaw(mat, color);
gl.drawElements(gl.TRIANGLES, this.coneIndexCount, gl.UNSIGNED_SHORT, this.coneIndexOffset * 2.0);
};
var arrowMat = mat4.create();
var arrowMatTemp = mat4.create();
var arrowVecA = vec3.create();
var arrowVecB = vec3.create();
var arrowVecC = vec3.create();
// Draw an arrow for visualizing a vector. Unit length is 10cm,
// you can apply an additional length scale on top of that to resize
// vector length while keeping the thickness/arrow head unchanged.
DebugGeometry.prototype.drawArrow = function(mat, v, color, opt_lenScale) {
// Find the largest component of the input vector.
var maxIdx = -1;
var maxLen = 0;
if (Math.abs(v[0]) > maxLen) { maxLen = Math.abs(v[0]); maxIdx = 0; }
if (Math.abs(v[1]) > maxLen) { maxLen = Math.abs(v[1]); maxIdx = 1; }
if (Math.abs(v[2]) > maxLen) { maxLen = Math.abs(v[2]); maxIdx = 2; }
// If the vector is all zero, can't draw the arrow.
if (maxIdx < 0) return;
// Build rotation matrix by computing three orthonormal base vectors.
var a = arrowVecA;
var b = arrowVecB;
var c = arrowVecC;
// New "Z" axis points in direction of the supplied vector.
vec3.normalize(c, v);
// Find an arbitrary vector orthogonal to vector c. Use the largest
// component index computed above to ensure it's nonzero.
var i = maxIdx;
var j = (maxIdx + 1) % 3;
var k = (maxIdx + 2) % 3;
a[i] = -c[j] - c[k];
a[j] = c[i];
a[k] = c[i];
// For the third base vector, just use the cross product of the two
// found so far.
vec3.cross(b, c, a);
// Now we're ready to set up the rotation matrix.
mat4.set(arrowMatTemp,
a[0], a[1], a[2], 0,
b[0], b[1], b[2], 0,
c[0], c[1], c[2], 0,
0, 0, 0, 1);
// Apply this rotation to the supplied base transform matrix,
// add a scale factor so that a unit vector will show as 10cm instead
// of 1m size.
mat4.multiply(arrowMat, mat, arrowMatTemp);
mat4.scale(arrowMat, arrowMat, [0.1, 0.1, 0.1]);
var arrowLen = vec3.length(v);
if (opt_lenScale) arrowLen *= opt_lenScale;
// Cone arrow head
mat4.translate(arrowMatTemp, arrowMat, [0, 0, arrowLen]);
mat4.rotateX(arrowMatTemp, arrowMatTemp, Math.PI * 0.5);
mat4.scale(arrowMatTemp, arrowMatTemp, [0.3, 0.3, 0.3]);
this.drawConeWithMatrix(arrowMatTemp, color);
// Arrow stem quadrilateral
mat4.translate(arrowMatTemp, arrowMat, [0, 0, arrowLen / 2]);
mat4.scale(arrowMatTemp, arrowMatTemp, [0.05, 0.05, arrowLen]);
this.drawBoxWithMatrix(arrowMatTemp, color);
};
var arrowColor = vec4.create();
var axisVec = vec3.create();
// Draws coordinate axis vectors from the matrix's transform
// origin. x=red, y=green, z=blue, unit length is 10cm.
DebugGeometry.prototype.drawCoordinateAxes = function(mat) {
vec4.set(arrowColor, 1, 0, 0, 1);
vec3.set(axisVec, 1, 0, 0);
this.drawArrow(mat, axisVec, arrowColor);
vec4.set(arrowColor, 0, 1, 0, 1);
vec3.set(axisVec, 0, 1, 0);
this.drawArrow(mat, axisVec, arrowColor);
vec4.set(arrowColor, 0, 0, 1, 1);
vec3.set(axisVec, 0, 0, 1);
this.drawArrow(mat, axisVec, arrowColor);
};
return DebugGeometry;
})();


@ -0,0 +1,162 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Caches specified GL state, runs a callback, and restores the cached state when
done.
Example usage:
var savedState = [
gl.ARRAY_BUFFER_BINDING,
// TEXTURE_BINDING_2D or _CUBE_MAP must always be followed by the texture unit.
gl.TEXTURE_BINDING_2D, gl.TEXTURE0,
gl.CLEAR_COLOR,
];
// After this call the array buffer, texture unit 0, active texture, and clear
// color will be restored. The viewport will remain changed, however, because
// gl.VIEWPORT was not included in the savedState list.
WGLUPreserveGLState(gl, savedState, function(gl) {
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, ....);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, ...);
gl.clearColor(1, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
});
Note that this is not intended to be fast. Managing state in your own code to
avoid redundant state setting and querying will always be faster. This function
is most useful for cases where you may not have full control over the WebGL
calls being made, such as tooling or effect injectors.
*/
function WGLUPreserveGLState(gl, bindings, callback) {
if (!bindings) {
callback(gl);
return;
}
var boundValues = [];
var activeTexture = null;
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
switch (binding) {
case gl.TEXTURE_BINDING_2D:
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31) {
console.error("TEXTURE_BINDING_2D or TEXTURE_BINDING_CUBE_MAP must be followed by a valid texture unit");
boundValues.push(null, null);
break;
}
if (!activeTexture) {
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
}
gl.activeTexture(textureUnit);
boundValues.push(gl.getParameter(binding), null);
break;
case gl.ACTIVE_TEXTURE:
activeTexture = gl.getParameter(gl.ACTIVE_TEXTURE);
boundValues.push(null);
break;
default:
boundValues.push(gl.getParameter(binding));
break;
}
}
callback(gl);
for (var i = 0; i < bindings.length; ++i) {
var binding = bindings[i];
var boundValue = boundValues[i];
switch (binding) {
case gl.ACTIVE_TEXTURE:
break; // Ignore this binding, since we special-case it to happen last.
case gl.ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ARRAY_BUFFER, boundValue);
break;
case gl.COLOR_CLEAR_VALUE:
gl.clearColor(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.COLOR_WRITEMASK:
gl.colorMask(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.CURRENT_PROGRAM:
gl.useProgram(boundValue);
break;
case gl.ELEMENT_ARRAY_BUFFER_BINDING:
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, boundValue);
break;
case gl.FRAMEBUFFER_BINDING:
gl.bindFramebuffer(gl.FRAMEBUFFER, boundValue);
break;
case gl.RENDERBUFFER_BINDING:
gl.bindRenderbuffer(gl.RENDERBUFFER, boundValue);
break;
case gl.TEXTURE_BINDING_2D:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, boundValue);
break;
case gl.TEXTURE_BINDING_CUBE_MAP:
var textureUnit = bindings[++i];
if (textureUnit < gl.TEXTURE0 || textureUnit > gl.TEXTURE31)
break;
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, boundValue);
break;
case gl.VIEWPORT:
gl.viewport(boundValue[0], boundValue[1], boundValue[2], boundValue[3]);
break;
case gl.BLEND:
case gl.CULL_FACE:
case gl.DEPTH_TEST:
case gl.SCISSOR_TEST:
case gl.STENCIL_TEST:
if (boundValue) {
gl.enable(binding);
} else {
gl.disable(binding);
}
break;
default:
console.log("No GL restore behavior for 0x" + binding.toString(16));
break;
}
if (activeTexture) {
gl.activeTexture(activeTexture);
}
}
}


@ -0,0 +1,179 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Utility class to make loading shader programs easier. Does all the error
checking you typically want, automatically queries uniform and attribute
locations, and attempts to take advantage of some browser's ability to link
asynchronously by not querying any information from the program until it's
first use.
*/
var WGLUProgram = (function() {
"use strict";
// Attempts to allow the browser to asynchronously compile and link
var Program = function(gl) {
this.gl = gl;
this.program = gl.createProgram();
this.attrib = null;
this.uniform = null;
this._firstUse = true;
this._vertexShader = null;
this._fragmentShader = null;
}
Program.prototype.attachShaderSource = function(source, type) {
var gl = this.gl;
var shader;
switch (type) {
case gl.VERTEX_SHADER:
this._vertexShader = gl.createShader(type);
shader = this._vertexShader;
break;
case gl.FRAGMENT_SHADER:
this._fragmentShader = gl.createShader(type);
shader = this._fragmentShader;
break;
default:
console.error("Invalid Shader Type:", type);
return;
}
gl.attachShader(this.program, shader);
gl.shaderSource(shader, source);
gl.compileShader(shader);
}
Program.prototype.attachShaderSourceFromXHR = function(url, type) {
var self = this;
return new Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.addEventListener("load", function (ev) {
if (xhr.status == 200) {
self.attachShaderSource(xhr.response, type);
resolve();
} else {
reject(xhr.statusText);
}
}, false);
xhr.open("GET", url, true);
xhr.send(null);
});
}
Program.prototype.attachShaderSourceFromTag = function(tagId, type) {
var shaderTag = document.getElementById(tagId);
if (!shaderTag) {
console.error("Shader source tag not found:", tagId);
return;
}
if (!type) {
if (shaderTag.type == "x-shader/x-vertex") {
type = this.gl.VERTEX_SHADER;
} else if (shaderTag.type == "x-shader/x-fragment") {
type = this.gl.FRAGMENT_SHADER;
} else {
console.error("Invalid Shader Type:", shaderTag.type);
return;
}
}
var src = "";
var k = shaderTag.firstChild;
while (k) {
if (k.nodeType == 3) {
src += k.textContent;
}
k = k.nextSibling;
}
this.attachShaderSource(src, type);
}
Program.prototype.bindAttribLocation = function(attribLocationMap) {
var gl = this.gl;
if (attribLocationMap) {
this.attrib = {};
for (var attribName in attribLocationMap) {
gl.bindAttribLocation(this.program, attribLocationMap[attribName], attribName);
this.attrib[attribName] = attribLocationMap[attribName];
}
}
}
Program.prototype.transformFeedbackVaryings = function(varyings, type) {
// Use the instance's context; the bare global `gl` here was a bug.
this.gl.transformFeedbackVaryings(this.program, varyings, type);
}
Program.prototype.link = function() {
this.gl.linkProgram(this.program);
}
Program.prototype.use = function() {
var gl = this.gl;
// If this is the first time the program has been used do all the error checking and
// attrib/uniform querying needed.
if (this._firstUse) {
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
if (this._vertexShader && !gl.getShaderParameter(this._vertexShader, gl.COMPILE_STATUS)) {
console.error("Vertex shader compile error:", gl.getShaderInfoLog(this._vertexShader));
} else if (this._fragmentShader && !gl.getShaderParameter(this._fragmentShader, gl.COMPILE_STATUS)) {
console.error("Fragment shader compile error:", gl.getShaderInfoLog(this._fragmentShader));
} else {
console.error("Program link error:", gl.getProgramInfoLog(this.program));
}
gl.deleteProgram(this.program);
this.program = null;
} else {
if (!this.attrib) {
this.attrib = {};
var attribCount = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
for (var i = 0; i < attribCount; i++) {
var attribInfo = gl.getActiveAttrib(this.program, i);
this.attrib[attribInfo.name] = gl.getAttribLocation(this.program, attribInfo.name);
}
}
this.uniform = {};
var uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(this.program, i);
uniformName = uniformInfo.name.replace("[0]", "");
this.uniform[uniformName] = gl.getUniformLocation(this.program, uniformName);
}
}
gl.deleteShader(this._vertexShader);
gl.deleteShader(this._fragmentShader);
this._firstUse = false;
}
gl.useProgram(this.program);
}
return Program;
})();


@ -0,0 +1,670 @@
/*
Copyright (c) 2016, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Heavily inspired by Mr. Doobs stats.js, this FPS counter is rendered completely
with WebGL, allowing it to be shown in cases where overlaid HTML elements aren't
usable (like WebVR), or if you want the FPS counter to be rendered as part of
your scene.
See stats-test.html for basic usage.
*/
var WGLUStats = (function() {
"use strict";
//--------------------
// glMatrix functions
//--------------------
// These functions have been copied here from glMatrix (glmatrix.net) to allow
// this file to run standalone.
var mat4_identity = function(out) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
var mat4_multiply = function (out, a, b) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
// Cache only the current line of the second matrix
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
return out;
};
var mat4_fromTranslation = function(out, v) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
var mat4_ortho = function (out, left, right, bottom, top, near, far) {
var lr = 1 / (left - right),
bt = 1 / (bottom - top),
nf = 1 / (near - far);
out[0] = -2 * lr;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = -2 * bt;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 2 * nf;
out[11] = 0;
out[12] = (left + right) * lr;
out[13] = (top + bottom) * bt;
out[14] = (far + near) * nf;
out[15] = 1;
return out;
};
var mat4_translate = function (out, a, v) {
var x = v[0], y = v[1], z = v[2],
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
if (a === out) {
out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
} else {
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
out[12] = a00 * x + a10 * y + a20 * z + a[12];
out[13] = a01 * x + a11 * y + a21 * z + a[13];
out[14] = a02 * x + a12 * y + a22 * z + a[14];
out[15] = a03 * x + a13 * y + a23 * z + a[15];
}
return out;
};
var mat4_scale = function(out, a, v) {
var x = v[0], y = v[1], z = v[2];
out[0] = a[0] * x;
out[1] = a[1] * x;
out[2] = a[2] * x;
out[3] = a[3] * x;
out[4] = a[4] * y;
out[5] = a[5] * y;
out[6] = a[6] * y;
out[7] = a[7] * y;
out[8] = a[8] * z;
out[9] = a[9] * z;
out[10] = a[10] * z;
out[11] = a[11] * z;
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
//-------------------
// Utility functions
//-------------------
function linkProgram(gl, vertexSource, fragmentSource, attribLocationMap) {
// No error checking for brevity.
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexSource);
gl.compileShader(vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentSource);
gl.compileShader(fragmentShader);
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
for (var attribName in attribLocationMap)
gl.bindAttribLocation(program, attribLocationMap[attribName], attribName);
gl.linkProgram(program);
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
return program;
}
function getProgramUniforms(gl, program) {
var uniforms = {};
var uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS);
var uniformName = "";
for (var i = 0; i < uniformCount; i++) {
var uniformInfo = gl.getActiveUniform(program, i);
uniformName = uniformInfo.name.replace("[0]", "");
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
return uniforms;
}
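// Hedged example of how these two helpers combine, matching their use further
// down in this file ("vsSource", "fsSource", and "projMat" are assumptions):
//
//   var program = linkProgram(gl, vsSource, fsSource, { position: 0 });
//   var uniforms = getProgramUniforms(gl, program);
//   gl.useProgram(program);
//   gl.uniformMatrix4fv(uniforms.projectionMat, false, projMat);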
//----------------------------
// Seven-segment text display
//----------------------------
var sevenSegmentVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec2 position;",
"void main() {",
" gl_Position = projectionMat * modelViewMat * vec4( position, 0.0, 1.0 );",
"}",
].join("\n");
var sevenSegmentFS = [
"precision mediump float;",
"uniform vec4 color;",
"void main() {",
" gl_FragColor = color;",
"}",
].join("\n");
var SevenSegmentText = function (gl) {
this.gl = gl;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, sevenSegmentVS, sevenSegmentFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var verts = [];
var segmentIndices = {};
var indices = [];
var width = 0.5;
var thickness = 0.25;
this.kerning = 2.0;
this.matrix = new Float32Array(16);
function defineSegment(id, left, top, right, bottom) {
var idx = verts.length / 2;
verts.push(
left, top,
right, top,
right, bottom,
left, bottom);
segmentIndices[id] = [
idx, idx+2, idx+1,
idx, idx+3, idx+2];
}
var characters = {};
this.characters = characters;
function defineCharacter(c, segments) {
var character = {
character: c,
offset: indices.length * 2,
count: 0
};
for (var i = 0; i < segments.length; ++i) {
var idx = segments[i];
var segment = segmentIndices[idx];
character.count += segment.length;
indices.push.apply(indices, segment);
}
characters[c] = character;
}
/* Segment layout is as follows:
|-0-|
3 4
|-1-|
5 6
|-2-|
*/
defineSegment(0, -1, 1, width, 1-thickness);
defineSegment(1, -1, thickness*0.5, width, -thickness*0.5);
defineSegment(2, -1, -1+thickness, width, -1);
defineSegment(3, -1, 1, -1+thickness, -thickness*0.5);
defineSegment(4, width-thickness, 1, width, -thickness*0.5);
defineSegment(5, -1, thickness*0.5, -1+thickness, -1);
defineSegment(6, width-thickness, thickness*0.5, width, -1);
defineCharacter("0", [0, 2, 3, 4, 5, 6]);
defineCharacter("1", [4, 6]);
defineCharacter("2", [0, 1, 2, 4, 5]);
defineCharacter("3", [0, 1, 2, 4, 6]);
defineCharacter("4", [1, 3, 4, 6]);
defineCharacter("5", [0, 1, 2, 3, 6]);
defineCharacter("6", [0, 1, 2, 3, 5, 6]);
defineCharacter("7", [0, 4, 6]);
defineCharacter("8", [0, 1, 2, 3, 4, 5, 6]);
defineCharacter("9", [0, 1, 2, 3, 4, 6]);
defineCharacter("A", [0, 1, 3, 4, 5, 6]);
defineCharacter("B", [1, 2, 3, 5, 6]);
defineCharacter("C", [0, 2, 3, 5]);
defineCharacter("D", [1, 2, 4, 5, 6]);
defineCharacter("E", [0, 1, 2, 4, 6]);
defineCharacter("F", [0, 1, 3, 5]);
defineCharacter("P", [0, 1, 3, 4, 5]);
defineCharacter("-", [1]);
defineCharacter(" ", []);
defineCharacter("_", [2]); // Used for undefined characters
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.DYNAMIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
};
SevenSegmentText.prototype.render = function(projectionMat, modelViewMat, text, r, g, b, a) {
var gl = this.gl;
if (r == undefined || g == undefined || b == undefined) {
r = 0.0;
g = 1.0;
b = 0.0;
}
if (a == undefined)
a = 1.0;
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniform4f(this.uniforms.color, r, g, b, a);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(this.attribs.position);
gl.vertexAttribPointer(this.attribs.position, 2, gl.FLOAT, false, 8, 0);
text = text.toUpperCase();
var offset = 0;
for (var i = 0; i < text.length; ++i) {
var c;
if (text[i] in this.characters) {
c = this.characters[text[i]];
} else {
c = this.characters["_"];
}
if (c.count != 0) {
mat4_fromTranslation(this.matrix, [offset, 0, 0]);
mat4_multiply(this.matrix, modelViewMat, this.matrix);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, this.matrix);
gl.drawElements(gl.TRIANGLES, c.count, gl.UNSIGNED_SHORT, c.offset);
}
offset += this.kerning;
}
}
//-----------
// FPS Graph
//-----------
var statsVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec3 color;",
"varying vec4 vColor;",
"void main() {",
" vColor = vec4(color, 1.0);",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var statsFS = [
"precision mediump float;",
"varying vec4 vColor;",
"void main() {",
" gl_FragColor = vColor;",
"}",
].join("\n");
var segments = 30;
var maxFPS = 90;
function segmentToX(i) {
return ((0.9/segments) * i) - 0.45;
}
function fpsToY(value) {
return (Math.min(value, maxFPS) * (0.7 / maxFPS)) - 0.45;
}
function fpsToRGB(value) {
return {
r: Math.max(0.0, Math.min(1.0, 1.0 - (value/60))),
g: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15)))),
b: Math.max(0.0, Math.min(1.0, ((value-15)/(maxFPS-15))))
};
}
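// For example, with maxFPS = 90 a steady 60 FPS gives fpsToRGB(60) =
// { r: 0.0, g: 0.6, b: 0.6 }, so bars shade from red at low FPS toward cyan.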
var now = (window.performance && performance.now) ? performance.now.bind(performance) : Date.now;
var Stats = function(gl, enablePerformanceMonitoring) {
this.gl = gl;
this.enablePerformanceMonitoring = enablePerformanceMonitoring;
this.sevenSegmentText = new SevenSegmentText(gl);
this.startTime = now();
this.prevFrameTime = this.startTime;
this.prevGraphUpdateTime = this.startTime;
this.frames = 0;
this.fpsAverage = 0;
this.fpsMin = 0;
this.fpsStep = enablePerformanceMonitoring ? 1000 : 250;
this.orthoProjMatrix = new Float32Array(16);
this.orthoViewMatrix = new Float32Array(16);
this.modelViewMatrix = new Float32Array(16);
// Hard coded because it doesn't change:
// Scale by 0.075 in X and Y
// Translate into upper left corner w/ z = 0.02
this.textMatrix = new Float32Array([
0.075, 0, 0, 0,
0, 0.075, 0, 0,
0, 0, 1, 0,
-0.3625, 0.3625, 0.02, 1
]);
this.lastSegment = 0;
this.attribs = {
position: 0,
color: 1
};
this.program = linkProgram(gl, statsVS, statsFS, this.attribs);
this.uniforms = getProgramUniforms(gl, this.program);
var fpsVerts = [];
var fpsIndices = [];
// Graph geometry
for (var i = 0; i < segments; ++i) {
// Bar top
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
// Bar bottom
fpsVerts.push(segmentToX(i), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
fpsVerts.push(segmentToX(i+1), fpsToY(0), 0.02, 0.0, 1.0, 1.0);
var idx = i * 4;
fpsIndices.push(idx, idx+3, idx+1,
idx+3, idx, idx+2);
}
function addBGSquare(left, bottom, right, top, z, r, g, b) {
var idx = fpsVerts.length / 6;
fpsVerts.push(left, bottom, z, r, g, b);
fpsVerts.push(right, top, z, r, g, b);
fpsVerts.push(left, top, z, r, g, b);
fpsVerts.push(right, bottom, z, r, g, b);
fpsIndices.push(idx, idx+1, idx+2,
idx, idx+3, idx+1);
};
// Panel Background
addBGSquare(-0.5, -0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.125);
// FPS Background
addBGSquare(-0.45, -0.45, 0.45, 0.25, 0.01, 0.0, 0.0, 0.4);
// 30 FPS line
addBGSquare(-0.45, fpsToY(30), 0.45, fpsToY(32), 0.015, 0.5, 0.0, 0.5);
// 60 FPS line
addBGSquare(-0.45, fpsToY(60), 0.45, fpsToY(62), 0.015, 0.2, 0.0, 0.75);
this.fpsVertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fpsVerts), gl.DYNAMIC_DRAW);
this.fpsIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(fpsIndices), gl.STATIC_DRAW);
this.fpsIndexCount = fpsIndices.length;
};
Stats.prototype.begin = function() {
this.startTime = now();
};
Stats.prototype.end = function() {
var time = now();
var frameFps = 1000 / (time - this.prevFrameTime);
this.prevFrameTime = time;
this.fpsMin = this.frames ? Math.min(this.fpsMin, frameFps) : frameFps;
this.frames++;
if (time > this.prevGraphUpdateTime + this.fpsStep) {
var intervalTime = time - this.prevGraphUpdateTime;
this.fpsAverage = Math.round(1000 / (intervalTime / this.frames));
// Draw both average and minimum FPS for this period
// so that dropped frames are more clearly visible.
this.updateGraph(this.fpsMin, this.fpsAverage);
if (this.enablePerformanceMonitoring) {
console.log("Average FPS: " + this.fpsAverage + " " +
"Min FPS: " + this.fpsMin);
}
this.prevGraphUpdateTime = time;
this.frames = 0;
this.fpsMin = 0;
}
};
Stats.prototype.updateGraph = function(valueLow, valueHigh) {
var gl = this.gl;
var color = fpsToRGB(valueLow);
// Draw a range from the low to high value. Artificially widen the
// range a bit to ensure that near-equal values still remain
// visible - the logic here should match that used by the
// "60 FPS line" setup below. Hitting 60fps consistently will
// keep the top half of the 60fps background line visible.
var y0 = fpsToY(valueLow - 1);
var y1 = fpsToY(valueHigh + 1);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
// Update the current segment with the new FPS value
var updateVerts = [
segmentToX(this.lastSegment), y1, 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), y1, 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment), y0, 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), y0, 0.02, color.r, color.g, color.b,
];
// Re-shape the next segment into the green "progress" line
color.r = 0.2;
color.g = 1.0;
color.b = 0.2;
if (this.lastSegment == segments - 1) {
// If we're updating the last segment we need to do two bufferSubDatas
// to update the segment and turn the first segment into the progress line.
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
updateVerts = [
segmentToX(0), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(0), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(.25), fpsToY(0), 0.02, color.r, color.g, color.b
];
gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(updateVerts));
} else {
updateVerts.push(
segmentToX(this.lastSegment+1), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(maxFPS), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1), fpsToY(0), 0.02, color.r, color.g, color.b,
segmentToX(this.lastSegment+1.25), fpsToY(0), 0.02, color.r, color.g, color.b
);
gl.bufferSubData(gl.ARRAY_BUFFER, this.lastSegment * 24 * 4, new Float32Array(updateVerts));
}
this.lastSegment = (this.lastSegment+1) % segments;
};
Stats.prototype.render = function(projectionMat, modelViewMat) {
var gl = this.gl;
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.uniforms.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(this.uniforms.modelViewMat, false, modelViewMat);
gl.enableVertexAttribArray(this.attribs.position);
gl.enableVertexAttribArray(this.attribs.color);
gl.bindBuffer(gl.ARRAY_BUFFER, this.fpsVertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.fpsIndexBuffer);
gl.vertexAttribPointer(this.attribs.position, 3, gl.FLOAT, false, 24, 0);
gl.vertexAttribPointer(this.attribs.color, 3, gl.FLOAT, false, 24, 12);
// Draw the graph and background in a single call
gl.drawElements(gl.TRIANGLES, this.fpsIndexCount, gl.UNSIGNED_SHORT, 0);
mat4_multiply(this.modelViewMatrix, modelViewMat, this.textMatrix);
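// " FP5" stands in for " FPS": the seven-segment font above defines no "S",
// and "5" uses exactly the segments an "S" would.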
this.sevenSegmentText.render(projectionMat, this.modelViewMatrix, this.fpsAverage + " FP5");
}
Stats.prototype.renderOrtho = function(x, y, width, height) {
var canvas = this.gl.canvas;
if (x == undefined || y == undefined) {
x = 10 * window.devicePixelRatio;
y = 10 * window.devicePixelRatio;
}
if (width == undefined || height == undefined) {
width = 75 * window.devicePixelRatio;
height = 75 * window.devicePixelRatio;
}
mat4_ortho(this.orthoProjMatrix, 0, canvas.width, 0, canvas.height, 0.1, 1024);
mat4_identity(this.orthoViewMatrix);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [x, canvas.height - height - y, -1]);
mat4_scale(this.orthoViewMatrix, this.orthoViewMatrix, [width, height, 1]);
mat4_translate(this.orthoViewMatrix, this.orthoViewMatrix, [0.5, 0.5, 0]);
this.render(this.orthoProjMatrix, this.orthoViewMatrix);
}
return Stats;
})();

View File

@ -0,0 +1,687 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Handles loading of textures of multiple formats and tries to be efficient about it.
Supported formats will vary by device. Use the .supports<format>() functions
to determine if a format is supported. Most of the time you can just call
loader.loadTexture("url"); and it will handle it based on the extension.
If the extension can't be relied on, use the corresponding
.load<Extension>("url") calls.
*/
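// A minimal usage sketch ("gl" and the texture URL are assumptions):
//
//   var loader = new WGLUTextureLoader(gl);
//   var texture = loader.loadTexture("textures/stone.dds", null,
//       function(tex, error, stats) {
//         if (error) { console.error(error); return; }
//         console.log("Uploaded " + stats.width + "x" + stats.height +
//                     " texture in " + stats.uploadTime + "ms");
//       });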
var WGLUTextureLoader = (function() {
"use strict";
//============================//
// DXT constants and utilites //
//============================//
// Utility functions
// Builds a numeric code for a given fourCC string
function fourCCToInt32(value) {
return value.charCodeAt(0) +
(value.charCodeAt(1) << 8) +
(value.charCodeAt(2) << 16) +
(value.charCodeAt(3) << 24);
}
// Turns a fourCC numeric code into a string
function int32ToFourCC(value) {
return String.fromCharCode(
value & 0xff,
(value >> 8) & 0xff,
(value >> 16) & 0xff,
(value >> 24) & 0xff
);
}
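// For example, fourCCToInt32("DXT1") packs 'D'(0x44), 'X'(0x58), 'T'(0x54),
// '1'(0x31) little-endian into 0x31545844, and int32ToFourCC(0x31545844)
// recovers "DXT1".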
// Calculates the size of a compressed texture level in bytes
function textureLevelSize(format, width, height) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGB_ETC1_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 8;
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return ((width + 3) >> 2) * ((height + 3) >> 2) * 16;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
return Math.floor((Math.max(width, 8) * Math.max(height, 8) * 4 + 7) / 8);
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return Math.floor((Math.max(width, 16) * Math.max(height, 8) * 2 + 7) / 8);
default:
return 0;
}
}
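// For example, a 256x256 DXT1 level is ((256+3)>>2) * ((256+3)>>2) * 8
// = 64 * 64 * 8 = 32768 bytes; the DXT5 version of the same level is
// twice that, 65536 bytes.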
// DXT formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/
var COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0;
var COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1;
var COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2;
var COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3;
// ATC formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_atc/
var COMPRESSED_RGB_ATC_WEBGL = 0x8C92;
var COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL = 0x8C93;
var COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL = 0x87EE;
// DXT values and structures referenced from:
// http://msdn.microsoft.com/en-us/library/bb943991.aspx/
var DDS_MAGIC = 0x20534444;
var DDSD_MIPMAPCOUNT = 0x20000;
var DDPF_FOURCC = 0x4;
var DDS_HEADER_LENGTH = 31; // The header length in 32 bit ints.
// Offsets into the header array.
var DDS_HEADER_MAGIC = 0;
var DDS_HEADER_SIZE = 1;
var DDS_HEADER_FLAGS = 2;
var DDS_HEADER_HEIGHT = 3;
var DDS_HEADER_WIDTH = 4;
var DDS_HEADER_MIPMAPCOUNT = 7;
var DDS_HEADER_PF_FLAGS = 20;
var DDS_HEADER_PF_FOURCC = 21;
// FourCC format identifiers.
var FOURCC_DXT1 = fourCCToInt32("DXT1");
var FOURCC_DXT3 = fourCCToInt32("DXT3");
var FOURCC_DXT5 = fourCCToInt32("DXT5");
var FOURCC_ATC = fourCCToInt32("ATC ");
var FOURCC_ATCA = fourCCToInt32("ATCA");
var FOURCC_ATCI = fourCCToInt32("ATCI");
//==================//
// Crunch constants //
//==================//
// Taken from crnlib.h
var CRN_FORMAT = {
cCRNFmtInvalid: -1,
cCRNFmtDXT1: 0,
// cCRNFmtDXT3 is not currently supported when writing to CRN - only DDS.
cCRNFmtDXT3: 1,
cCRNFmtDXT5: 2
// Crunch supports more formats than this, but we can't use them here.
};
// Mapping of Crunch formats to DXT formats.
var DXT_FORMAT_MAP = {};
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT1] = COMPRESSED_RGB_S3TC_DXT1_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT3] = COMPRESSED_RGBA_S3TC_DXT3_EXT;
DXT_FORMAT_MAP[CRN_FORMAT.cCRNFmtDXT5] = COMPRESSED_RGBA_S3TC_DXT5_EXT;
//===============//
// PVR constants //
//===============//
// PVR formats, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_pvrtc/
var COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00;
var COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01;
var COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02;
var COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03;
// ETC1 format, from:
// http://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/
var COMPRESSED_RGB_ETC1_WEBGL = 0x8D64;
var PVR_FORMAT_2BPP_RGB = 0;
var PVR_FORMAT_2BPP_RGBA = 1;
var PVR_FORMAT_4BPP_RGB = 2;
var PVR_FORMAT_4BPP_RGBA = 3;
var PVR_FORMAT_ETC1 = 6;
var PVR_FORMAT_DXT1 = 7;
var PVR_FORMAT_DXT3 = 9;
var PVR_FORMAT_DXT5 = 5;
var PVR_HEADER_LENGTH = 13; // The header length in 32 bit ints.
var PVR_MAGIC = 0x03525650; //0x50565203;
// Offsets into the header array.
var PVR_HEADER_MAGIC = 0;
var PVR_HEADER_FORMAT = 2;
var PVR_HEADER_HEIGHT = 6;
var PVR_HEADER_WIDTH = 7;
var PVR_HEADER_MIPMAPCOUNT = 11;
var PVR_HEADER_METADATA = 12;
//============//
// Misc Utils //
//============//
// When an error occurs set the texture to a 1x1 black pixel
// This prevents WebGL errors from attempting to use unrenderable textures
// and clears out stale data if we're re-using a texture.
function clearOnError(gl, error, texture, callback) {
if (console) {
console.error(error);
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// Notify the user that an error occurred and the texture is ready.
if (callback) { callback(texture, error, null); }
}
function isPowerOfTwo(n) {
return (n & (n - 1)) === 0;
}
function getExtension(gl, name) {
var vendorPrefixes = ["", "WEBKIT_", "MOZ_"];
var ext = null;
for (var i in vendorPrefixes) {
ext = gl.getExtension(vendorPrefixes[i] + name);
if (ext) { break; }
}
return ext;
}
//==================//
// DDS File Reading //
//==================//
// Parse a DDS file and provide information about the raw DXT data it contains to the given callback.
function parseDDS(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the DDS header.
var header = new Int32Array(arrayBuffer, 0, DDS_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid DDS file.
if(header[DDS_HEADER_MAGIC] != DDS_MAGIC) {
errorCallback("Invalid magic number in DDS header");
return 0;
}
if(!(header[DDS_HEADER_PF_FLAGS] & DDPF_FOURCC)) {
errorCallback("Unsupported format, must contain a FourCC code");
return 0;
}
// Determine what type of compressed data the file contains.
var fourCC = header[DDS_HEADER_PF_FOURCC];
var internalFormat;
switch(fourCC) {
case FOURCC_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case FOURCC_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case FOURCC_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
case FOURCC_ATC:
internalFormat = COMPRESSED_RGB_ATC_WEBGL;
break;
case FOURCC_ATCA:
internalFormat = COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL;
break;
case FOURCC_ATCI:
internalFormat = COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL;
break;
default:
errorCallback("Unsupported FourCC code: " + int32ToFourCC(fourCC));
return;
}
// Determine how many mipmap levels the file contains.
var levels = 1;
if(header[DDS_HEADER_FLAGS] & DDSD_MIPMAPCOUNT) {
levels = Math.max(1, header[DDS_HEADER_MIPMAPCOUNT]);
}
// Gather other basic metrics and a view of the raw DXT data.
var width = header[DDS_HEADER_WIDTH];
var height = header[DDS_HEADER_HEIGHT];
var dataOffset = header[DDS_HEADER_SIZE] + 4;
var dxtData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the DXT information to the callback for uploading.
callback(dxtData, width, height, levels, internalFormat);
}
//==================//
// PVR File Reading //
//==================//
// Parse a PVR file and provide information about the raw texture data it contains to the given callback.
function parsePVR(arrayBuffer, callback, errorCallback) {
// Callbacks must be provided.
if (!callback || !errorCallback) { return; }
// Get a view of the arrayBuffer that represents the PVR header.
var header = new Int32Array(arrayBuffer, 0, PVR_HEADER_LENGTH);
// Do some sanity checks to make sure this is a valid PVR file.
if(header[PVR_HEADER_MAGIC] != PVR_MAGIC) {
errorCallback("Invalid magic number in PVR header");
return 0;
}
// Determine what type of compressed data the file contains.
var format = header[PVR_HEADER_FORMAT];
var internalFormat;
switch(format) {
case PVR_FORMAT_2BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_2BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGB:
internalFormat = COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_4BPP_RGBA:
internalFormat = COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
break;
case PVR_FORMAT_ETC1:
internalFormat = COMPRESSED_RGB_ETC1_WEBGL;
break;
case PVR_FORMAT_DXT1:
internalFormat = COMPRESSED_RGB_S3TC_DXT1_EXT;
break;
case PVR_FORMAT_DXT3:
internalFormat = COMPRESSED_RGBA_S3TC_DXT3_EXT;
break;
case PVR_FORMAT_DXT5:
internalFormat = COMPRESSED_RGBA_S3TC_DXT5_EXT;
break;
default:
errorCallback("Unsupported PVR format: " + format);
return;
}
// Gather other basic metrics and a view of the raw PVRTC data.
var width = header[PVR_HEADER_WIDTH];
var height = header[PVR_HEADER_HEIGHT];
var levels = header[PVR_HEADER_MIPMAPCOUNT];
var dataOffset = header[PVR_HEADER_METADATA] + 52;
var pvrtcData = new Uint8Array(arrayBuffer, dataOffset);
// Pass the PVRTC information to the callback for uploading.
callback(pvrtcData, width, height, levels, internalFormat);
}
//=============//
// IMG loading //
//=============//
/*
This function provides a method for loading webgl textures using a pool of
image elements, which has very low memory overhead. For more details see:
http://blog.tojicode.com/2012/03/javascript-memory-optimization-and.html
*/
var loadImgTexture = (function createTextureLoader() {
var MAX_CACHE_IMAGES = 16;
var textureImageCache = new Array(MAX_CACHE_IMAGES);
var cacheTop = 0;
var remainingCacheImages = MAX_CACHE_IMAGES;
var pendingTextureRequests = [];
var TextureImageLoader = function(loadedCallback) {
var self = this;
var blackPixel = new Uint8Array([0, 0, 0]);
this.gl = null;
this.texture = null;
this.callback = null;
this.image = new Image();
this.image.crossOrigin = 'anonymous';
this.image.addEventListener('load', function() {
var gl = self.gl;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
var startTime = Date.now();
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, self.image);
if (isPowerOfTwo(self.image.width) && isPowerOfTwo(self.image.height)) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
var uploadTime = Date.now() - startTime;
if(self.callback) {
var stats = {
width: self.image.width,
height: self.image.height,
internalFormat: gl.RGBA,
levelZeroSize: self.image.width * self.image.height * 4,
uploadTime: uploadTime
};
self.callback(self.texture, null, stats);
}
loadedCallback(self);
}, false);
this.image.addEventListener('error', function(ev) {
clearOnError(self.gl, 'Image could not be loaded: ' + self.image.src, self.texture, self.callback);
loadedCallback(self);
}, false);
};
TextureImageLoader.prototype.loadTexture = function(gl, src, texture, callback) {
this.gl = gl;
this.texture = texture;
this.callback = callback;
this.image.src = src;
};
var PendingTextureRequest = function(gl, src, texture, callback) {
this.gl = gl;
this.src = src;
this.texture = texture;
this.callback = callback;
};
function releaseTextureImageLoader(til) {
var req;
if(pendingTextureRequests.length) {
req = pendingTextureRequests.shift();
til.loadTexture(req.gl, req.src, req.texture, req.callback);
} else {
textureImageCache[cacheTop++] = til;
}
}
return function(gl, src, texture, callback) {
var til;
if(cacheTop) {
til = textureImageCache[--cacheTop];
til.loadTexture(gl, src, texture, callback);
} else if (remainingCacheImages) {
til = new TextureImageLoader(releaseTextureImageLoader);
til.loadTexture(gl, src, texture, callback);
--remainingCacheImages;
} else {
pendingTextureRequests.push(new PendingTextureRequest(gl, src, texture, callback));
}
return texture;
};
})();
//=====================//
// TextureLoader Class //
//=====================//
// This class is our public interface.
var TextureLoader = function(gl) {
this.gl = gl;
// Load the compression format extensions, if available
this.dxtExt = getExtension(gl, "WEBGL_compressed_texture_s3tc");
this.pvrtcExt = getExtension(gl, "WEBGL_compressed_texture_pvrtc");
this.atcExt = getExtension(gl, "WEBGL_compressed_texture_atc");
this.etc1Ext = getExtension(gl, "WEBGL_compressed_texture_etc1");
// Returns whether or not the compressed format is supported by the WebGL implementation
TextureLoader.prototype._formatSupported = function(format) {
switch (format) {
case COMPRESSED_RGB_S3TC_DXT1_EXT:
case COMPRESSED_RGBA_S3TC_DXT3_EXT:
case COMPRESSED_RGBA_S3TC_DXT5_EXT:
return !!this.dxtExt;
case COMPRESSED_RGB_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_4BPPV1_IMG:
case COMPRESSED_RGB_PVRTC_2BPPV1_IMG:
case COMPRESSED_RGBA_PVRTC_2BPPV1_IMG:
return !!this.pvrtcExt;
case COMPRESSED_RGB_ATC_WEBGL:
case COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL:
case COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL:
return !!this.atcExt;
case COMPRESSED_RGB_ETC1_WEBGL:
return !!this.etc1Ext;
default:
return false;
}
}
// Uploads compressed texture data to the GPU.
TextureLoader.prototype._uploadCompressedData = function(data, width, height, levels, internalFormat, texture, callback) {
var gl = this.gl;
gl.bindTexture(gl.TEXTURE_2D, texture);
var offset = 0;
var stats = {
width: width,
height: height,
internalFormat: internalFormat,
levelZeroSize: textureLevelSize(internalFormat, width, height),
uploadTime: 0
};
var startTime = Date.now();
// Loop through each mip level of compressed texture data provided and upload it to the given texture.
for (var i = 0; i < levels; ++i) {
// Determine how big this level of compressed texture data is in bytes.
var levelSize = textureLevelSize(internalFormat, width, height);
// Get a view of the bytes for this level of DXT data.
var dxtLevel = new Uint8Array(data.buffer, data.byteOffset + offset, levelSize);
// Upload!
gl.compressedTexImage2D(gl.TEXTURE_2D, i, internalFormat, width, height, 0, dxtLevel);
// The next mip level will be half the height and width of this one.
width = width >> 1;
height = height >> 1;
// Advance the offset into the compressed texture data past the current mip level's data.
offset += levelSize;
}
stats.uploadTime = Date.now() - startTime;
// We can't use gl.generateMipmaps with compressed textures, so only use
// mipmapped filtering if the compressed texture data contained mip levels.
if (levels > 1) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
// Notify the user that the texture is ready.
if (callback) { callback(texture, null, stats); }
}
TextureLoader.prototype.supportsDXT = function() {
return !!this.dxtExt;
}
TextureLoader.prototype.supportsPVRTC = function() {
return !!this.pvrtcExt;
}
TextureLoader.prototype.supportsATC = function() {
return !!this.atcExt;
}
TextureLoader.prototype.supportsETC1 = function() {
return !!this.etc1Ext;
}
// Loads an image file into the given texture.
// Supports any format that can be loaded into an img tag
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadIMG = function(src, texture, callback) {
if(!texture) {
texture = this.gl.createTexture();
}
loadImgTexture(this.gl, src, texture, callback);
return texture;
}
// Loads a DDS file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadDDS = function(src, texture, callback) {
var self = this;
if (!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parseDDS(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed DXT data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a PVR file into the given texture.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadPVR = function(src, texture, callback) {
var self = this;
if(!texture) {
texture = this.gl.createTexture();
}
// Load the file via XHR.
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function (ev) {
if (xhr.status == 200) {
// If the file loaded successfully parse it.
parsePVR(xhr.response, function(dxtData, width, height, levels, internalFormat) {
if (!self._formatSupported(internalFormat)) {
clearOnError(self.gl, "Texture format not supported", texture, callback);
return;
}
// Upload the parsed PVR data to the texture.
self._uploadCompressedData(dxtData, width, height, levels, internalFormat, texture, callback);
}, function(error) {
clearOnError(self.gl, error, texture, callback);
});
} else {
clearOnError(self.gl, xhr.statusText, texture, callback);
}
}, false);
xhr.open('GET', src, true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
return texture;
}
// Loads a texture from a file. Guesses the type based on extension.
// If no texture is provided one is created and returned.
TextureLoader.prototype.loadTexture = function(src, texture, callback) {
// Shamelessly lifted from StackOverflow :)
// http://stackoverflow.com/questions/680929
var re = /(?:\.([^.]+))?$/;
var ext = re.exec(src)[1] || '';
ext = ext.toLowerCase();
switch(ext) {
case 'dds':
return this.loadDDS(src, texture, callback);
case 'pvr':
return this.loadPVR(src, texture, callback);
default:
return this.loadIMG(src, texture, callback);
}
}
// Sets a texture to a solid RGBA color
// If no texture is provided one is created and returned.
TextureLoader.prototype.makeSolidColor = function(r, g, b, a, texture) {
var gl = this.gl;
var data = new Uint8Array([r, g, b, a]);
if(!texture) {
texture = gl.createTexture();
}
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
return texture;
}
}
return TextureLoader;
})();

View File

@ -0,0 +1,99 @@
/*
Copyright (c) 2015, Brandon Jones.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Provides a simple way to get values from the query string if they're present
and use a default value if not. Not strictly a "WebGL" utility, but I use it
frequently enough for debugging that I wanted to include it here.
Example:
For the URL http://example.com/index.html?particleCount=1000
WGLUUrl.getInt("particleCount", 100); // URL overrides, returns 1000
WGLUUrl.getInt("particleSize", 10); // Not in URL, returns default of 10
*/
var WGLUUrl = (function() {
"use strict";
var urlArgs = null;
window.onhashchange = function() {
// Force re-parsing on next access
urlArgs = null;
};
function ensureArgsCached() {
if (!urlArgs) {
urlArgs = {};
var query = window.location.search.substring(1) || window.location.hash.substring(1);
var vars = query.split("&");
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split("=");
urlArgs[pair[0].toLowerCase()] = unescape(pair[1]);
}
}
}
function getString(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return urlArgs[lcaseName];
}
return defaultValue;
}
function getInt(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10);
}
return defaultValue;
}
function getFloat(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseFloat(urlArgs[lcaseName]);
}
return defaultValue;
}
function getBool(name, defaultValue) {
ensureArgsCached();
var lcaseName = name.toLowerCase();
if (lcaseName in urlArgs) {
return parseInt(urlArgs[lcaseName], 10) != 0;
}
return defaultValue;
}
return {
getString: getString,
getInt: getInt,
getFloat: getFloat,
getBool: getBool
};
})();

View File

@ -0,0 +1,284 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function (VRAudioPanner) {
'use strict';
// Default settings for panning. Cone parameters are experimentally
// determined.
var _PANNING_MODEL = 'HRTF';
var _DISTANCE_MODEL = 'inverse';
var _CONE_INNER_ANGLE = 60;
var _CONE_OUTER_ANGLE = 120;
var _CONE_OUTER_GAIN = 0.25;
// Super-simple web audio version detection.
var _LEGACY_WEBAUDIO = window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext');
if (_LEGACY_WEBAUDIO)
console.log('[VRAudioPanner] outdated version of Web Audio API detected.');
// Master audio context.
var _context = _LEGACY_WEBAUDIO ? new webkitAudioContext() : new AudioContext();
/**
* A buffer source player with HRTF panning for testing purposes.
* @param {Object} options Default options.
* @param {Number} options.gain Sound object gain. (0.0~1.0)
* @param {AudioBuffer} options.buffer AudioBuffer to play.
* @param {Number} options.detune Detune parameter. (cent)
* @param {Array} options.position x, y, z position in an array.
* @param {Array} options.orientation x, y, z orientation in an array.
*/
function TestSource (options) {
this._src = _context.createBufferSource();
this._out = _context.createGain();
this._panner = _context.createPanner();
this._analyser = _context.createAnalyser();
this._src.connect(this._out);
this._out.connect(this._analyser);
this._analyser.connect(this._panner);
this._panner.connect(_context.destination);
this._src.buffer = options.buffer;
this._src.loop = true;
this._out.gain.value = options.gain;
this._analyser.fftSize = 1024;
this._analyser.smoothingTimeConstant = 0.85;
this._lastRMSdB = 0.0;
this._panner.panningModel = _PANNING_MODEL;
this._panner.distanceModel = _DISTANCE_MODEL;
this._panner.coneInnerAngle = _CONE_INNER_ANGLE;
this._panner.coneOuterAngle = _CONE_OUTER_ANGLE;
this._panner.coneOuterGain = _CONE_OUTER_GAIN;
this._position = [0, 0, 0];
this._orientation = [1, 0, 0];
this._analyserBuffer = new Uint8Array(this._analyser.fftSize);
if (!_LEGACY_WEBAUDIO) {
this._src.detune.value = (options.detune || 0);
this._analyserBuffer = new Float32Array(this._analyser.fftSize);
}
this.setPosition(options.position);
this.setOrientation(options.orientation);
};
TestSource.prototype.start = function () {
this._src.start(0);
};
TestSource.prototype.stop = function () {
this._src.stop(0);
};
TestSource.prototype.getPosition = function () {
return this._position;
};
TestSource.prototype.setPosition = function (position) {
if (position) {
this._position[0] = position[0];
this._position[1] = position[1];
this._position[2] = position[2];
}
this._panner.setPosition.apply(this._panner, this._position);
};
TestSource.prototype.getOrientation = function () {
return this._orientation;
};
TestSource.prototype.setOrientation = function (orientation) {
if (orientation) {
this._orientation[0] = orientation[0];
this._orientation[1] = orientation[1];
this._orientation[2] = orientation[2];
}
this._panner.setOrientation.apply(this._panner, this._orientation);
};
TestSource.prototype.getCubeScale = function () {
// Safari does not support getFloatTimeDomainData(), so fall back to the
// naive spectral energy sum. This is relatively expensive.
if (_LEGACY_WEBAUDIO) {
this._analyser.getByteFrequencyData(this._analyserBuffer);
for (var k = 0, total = 0; k < this._analyserBuffer.length; ++k)
total += this._analyserBuffer[k];
total /= this._analyserBuffer.length;
return (total / 256.0) * 1.5;
}
this._analyser.getFloatTimeDomainData(this._analyserBuffer);
for (var i = 0, sum = 0; i < this._analyserBuffer.length; ++i)
sum += this._analyserBuffer[i] * this._analyserBuffer[i];
// Calculate RMS and convert it to DB for perceptual loudness.
var rms = Math.sqrt(sum / this._analyserBuffer.length);
var db = 30 + 10 / Math.LN10 * Math.log(rms <= 0 ? 0.0001 : rms);
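// (10 / Math.LN10 * Math.log(rms) is just 10 * log10(rms), so this maps the
// RMS amplitude to decibels with a +30 offset before the clamp below.)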
// Moving average with an alpha of 0.525. Experimentally determined.
this._lastRMSdB += 0.525 * ((db < 0 ? 0 : db) - this._lastRMSdB);
// Scaling by 1/30 is also experimentally determined.
return this._lastRMSdB / 30.0;
};
// Internal helper: load a file into a buffer. (github.com/hoch/spiral)
function _loadAudioFile(context, fileInfo, done) {
var xhr = new XMLHttpRequest();
xhr.open('GET', fileInfo.url);
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
if (xhr.status === 200) {
context.decodeAudioData(xhr.response,
function (buffer) {
console.log('[VRAudioPanner] File loaded: ' + fileInfo.url);
done(fileInfo.name, buffer);
},
function (message) {
console.log('[VRAudioPanner] Decoding failure: ' + fileInfo.url + ' (' + message + ')');
done(fileInfo.name, null);
});
} else {
console.log('[VRAudioPanner] XHR Error: ' + fileInfo.url + ' (' + xhr.statusText + ')');
done(fileInfo.name, null);
}
};
xhr.onerror = function (event) {
console.log('[VRAudioPanner] XHR Network failure: ' + fileInfo.url);
done(fileInfo.name, null);
};
xhr.send();
}
/**
* A wrapper/container class for multiple file loaders.
* @param {Object} context AudioContext
* @param {Object} audioFileData Audio file info in the format of {name, url}
* @param {Function} resolve Resolution handler for promise.
* @param {Function} reject Rejection handler for promise.
* @param {Function} progress Progress event handler.
*/
function AudioBufferManager(context, audioFileData, resolve, reject, progress) {
this._context = context;
this._resolve = resolve;
this._reject = reject;
this._progress = progress;
this._buffers = new Map();
this._loadingTasks = {};
// Kick off loading for each file.
for (var i = 0; i < audioFileData.length; i++) {
var fileInfo = audioFileData[i];
// Check for duplicate filenames and quit if one is found.
if (this._loadingTasks.hasOwnProperty(fileInfo.name)) {
console.log('[VRAudioPanner] Duplicated filename in AudioBufferManager: ' + fileInfo.name);
return;
}
// Mark it as pending (0)
this._loadingTasks[fileInfo.name] = 0;
_loadAudioFile(this._context, fileInfo, this._done.bind(this));
}
}
AudioBufferManager.prototype._done = function (filename, buffer) {
// Label the loading task.
this._loadingTasks[filename] = buffer !== null ? 'loaded' : 'failed';
// A failed task will be a null buffer.
this._buffers.set(filename, buffer);
this._updateProgress(filename);
};
AudioBufferManager.prototype._updateProgress = function (filename) {
var numberOfFinishedTasks = 0, numberOfFailedTask = 0;
var numberOfTasks = 0;
for (var task in this._loadingTasks) {
numberOfTasks++;
if (this._loadingTasks[task] === 'loaded')
numberOfFinishedTasks++;
else if (this._loadingTasks[task] === 'failed')
numberOfFailedTask++;
}
if (typeof this._progress === 'function')
this._progress(filename, numberOfFinishedTasks, numberOfTasks);
if (numberOfFinishedTasks === numberOfTasks)
this._resolve(this._buffers);
if (numberOfFinishedTasks + numberOfFailedTask === numberOfTasks)
this._reject(this._buffers);
};
/**
* Returns true if the web audio implementation is outdated.
* @return {Boolean}
*/
VRAudioPanner.isWebAudioOutdated = function () {
return _LEGACY_WEBAUDIO;
}
/**
* Static method for updating listener's position.
* @param {Array} position Listener position in x, y, z.
*/
VRAudioPanner.setListenerPosition = function (position) {
_context.listener.setPosition.apply(_context.listener, position);
};
/**
* Static method for updating listener's orientation.
* @param {Array} orientation Listener orientation in x, y, z.
* @param {Array} upvector Listener's up vector in x, y, z.
*/
VRAudioPanner.setListenerOrientation = function (orientation, upvector) {
_context.listener.setOrientation(
orientation[0], orientation[1], orientation[2],
upvector[0], upvector[1], upvector[2]);
};
/**
* Load audio files asynchronously.
* @param {Array} dataModel Audio file info in the format of {name, url}
* @param {Function} onprogress Callback function for reporting the progress.
* @return {Promise} Promise.
*/
VRAudioPanner.loadAudioFiles = function (dataModel, onprogress) {
return new Promise(function (resolve, reject) {
new AudioBufferManager(_context, dataModel, resolve, reject, onprogress);
});
};
/**
* Create a source player. See TestSource class for parameter description.
* @return {TestSource}
*/
VRAudioPanner.createTestSource = function (options) {
return new TestSource(options);
};
})(VRAudioPanner = {});
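// A minimal usage sketch (the file name, URL, and positions are assumptions):
//
//   VRAudioPanner.loadAudioFiles([{ name: 'drums', url: 'sound/drums.wav' }])
//     .then(function (buffers) {
//       var source = VRAudioPanner.createTestSource({
//         gain: 0.8,
//         buffer: buffers.get('drums'),
//         detune: 0,
//         position: [0, 0, -1],
//         orientation: [0, 0, 1]
//       });
//       source.start();
//     });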

View File

@ -0,0 +1,223 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
/*
Like CubeSea, but designed around a user's physical space. One central platform
that maps to the user's play area and several floating cubes that sit just
outside those boundaries (to add visual interest)
*/
window.VRCubeIsland = (function () {
"use strict";
var cubeIslandVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"attribute vec3 normal;",
"varying vec2 vTexCoord;",
"varying vec3 vLight;",
"const vec3 lightDir = vec3(0.75, 0.5, 1.0);",
"const vec3 ambientColor = vec3(0.5, 0.5, 0.5);",
"const vec3 lightColor = vec3(0.75, 0.75, 0.75);",
"void main() {",
" float lightFactor = max(dot(normalize(lightDir), normal), 0.0);",
" vLight = ambientColor + (lightColor * lightFactor);",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeIslandFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"varying vec3 vLight;",
"void main() {",
" gl_FragColor = vec4(vLight, 1.0) * texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var CubeIsland = function (gl, texture, width, depth) {
this.gl = gl;
this.statsMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeIslandVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(cubeIslandFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1,
normal: 2
});
this.program.link();
this.vertBuffer = gl.createBuffer();
this.indexBuffer = gl.createBuffer();
this.resize(width, depth);
};
CubeIsland.prototype.resize = function (width, depth) {
var gl = this.gl;
this.width = width;
this.depth = depth;
var cubeVerts = [];
var cubeIndices = [];
// Build a single box.
function appendBox (left, bottom, back, right, top, front) {
// Bottom
var idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, back, 0.0, 1.0, 0.0, -1.0, 0.0);
cubeVerts.push(right, bottom, back, 1.0, 1.0, 0.0, -1.0, 0.0);
cubeVerts.push(right, bottom, front, 1.0, 0.0, 0.0, -1.0, 0.0);
cubeVerts.push(left, bottom, front, 0.0, 0.0, 0.0, -1.0, 0.0);
// Top
idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, top, back, 0.0, 0.0, 0.0, 1.0, 0.0);
cubeVerts.push(right, top, back, 1.0, 0.0, 0.0, 1.0, 0.0);
cubeVerts.push(right, top, front, 1.0, 1.0, 0.0, 1.0, 0.0);
cubeVerts.push(left, top, front, 0.0, 1.0, 0.0, 1.0, 0.0);
// Left
idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 0.0, 1.0, -1.0, 0.0, 0.0);
cubeVerts.push(left, top, back, 0.0, 0.0, -1.0, 0.0, 0.0);
cubeVerts.push(left, top, front, 1.0, 0.0, -1.0, 0.0, 0.0);
cubeVerts.push(left, bottom, front, 1.0, 1.0, -1.0, 0.0, 0.0);
// Right
idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(right, bottom, back, 1.0, 1.0, 1.0, 0.0, 0.0);
cubeVerts.push(right, top, back, 1.0, 0.0, 1.0, 0.0, 0.0);
cubeVerts.push(right, top, front, 0.0, 0.0, 1.0, 0.0, 0.0);
cubeVerts.push(right, bottom, front, 0.0, 1.0, 1.0, 0.0, 0.0);
// Back
idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(left, bottom, back, 1.0, 1.0, 0.0, 0.0, -1.0);
cubeVerts.push(right, bottom, back, 0.0, 1.0, 0.0, 0.0, -1.0);
cubeVerts.push(right, top, back, 0.0, 0.0, 0.0, 0.0, -1.0);
cubeVerts.push(left, top, back, 1.0, 0.0, 0.0, 0.0, -1.0);
// Front
idx = cubeVerts.length / 8.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(left, bottom, front, 0.0, 1.0, 0.0, 0.0, 1.0);
cubeVerts.push(right, bottom, front, 1.0, 1.0, 0.0, 0.0, 1.0);
cubeVerts.push(right, top, front, 1.0, 0.0, 0.0, 0.0, 1.0);
cubeVerts.push(left, top, front, 0.0, 0.0, 0.0, 0.0, 1.0);
}
// Appends a cube with the given centerpoint and size.
function appendCube (x, y, z, size) {
var halfSize = size * 0.5;
appendBox(x - halfSize, y - halfSize, z - halfSize,
x + halfSize, y + halfSize, z + halfSize);
}
// Main "island", covers where the user can safely stand. Top of the cube
// (the ground the user stands on) should be at Y=0 to align with the user's
// floor. X=0 and Z=0 should be at the center of the user's play space.
appendBox(-width * 0.5, -width, -depth * 0.5, width * 0.5, 0, depth * 0.5);
// A sprinkling of other cubes to make things more visually interesting.
appendCube(1.1, 0.3, (-depth * 0.5) - 0.8, 0.5);
appendCube(-0.5, 1.0, (-depth * 0.5) - 0.9, 0.75);
appendCube(0.6, 1.5, (-depth * 0.5) - 0.6, 0.4);
appendCube(-1.0, 0.5, (-depth * 0.5) - 0.5, 0.2);
appendCube((-width * 0.5) - 0.8, 0.3, -1.1, 0.5);
appendCube((-width * 0.5) - 0.9, 1.0, 0.5, 0.75);
appendCube((-width * 0.5) - 0.6, 1.5, -0.6, 0.4);
appendCube((-width * 0.5) - 0.5, 0.5, 1.0, 0.2);
appendCube((width * 0.5) + 0.8, 0.3, 1.1, 0.5);
appendCube((width * 0.5) + 0.9, 1.0, -0.5, 0.75);
appendCube((width * 0.5) + 0.6, 1.5, 0.6, 0.4);
appendCube((width * 0.5) + 0.5, 0.5, -1.0, 0.2);
appendCube(1.1, 1.4, (depth * 0.5) + 0.8, 0.5);
appendCube(-0.5, 1.0, (depth * 0.5) + 0.9, 0.75);
appendCube(0.6, 0.4, (depth * 0.5) + 0.6, 0.4);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.indexCount = cubeIndices.length;
};
CubeIsland.prototype.render = function (projectionMat, modelViewMat, stats) {
var gl = this.gl;
var program = this.program;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.enableVertexAttribArray(program.attrib.normal);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 32, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 32, 12);
gl.vertexAttribPointer(program.attrib.normal, 3, gl.FLOAT, false, 32, 20);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, 1.5, -this.depth * 0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.5, 0.5, 0.5]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
stats.render(projectionMat, this.statsMat);
}
};
return CubeIsland;
})();
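// A minimal usage sketch ("gl", "texture", and the matrices are assumptions;
// the mat4 and WGLUProgram globals must already be loaded):
//
//   var island = new VRCubeIsland(gl, texture, 2, 2); // 2m x 2m play area
//   // Each frame, with matrices from the VR display's frame data:
//   island.render(projectionMat, viewMat, stats);     // stats arg is optional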

View File

@ -0,0 +1,422 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, mat3, WGLUProgram */
window.VRCubeSea = (function () {
"use strict";
var cubeSeaVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"uniform mat3 normalMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"attribute vec4 color;",
"attribute vec3 normal;",
"varying vec2 vTexCoord;",
"varying vec4 vColor;",
"varying vec3 vLight;",
"const vec3 lightDir = vec3(0.75, 0.5, 1.0);",
"const vec3 ambientColor = vec3(0.5, 0.5, 0.5);",
"const vec3 lightColor = vec3(0.75, 0.75, 0.75);",
"void main() {",
" vec3 normalRotated = normalMat * normal;",
//" float lightFactor = max(dot(normalize(lightDir), normalRotated), 0.0);",
//" vLight = ambientColor + (lightColor * lightFactor);",
" vTexCoord = texCoord;",
" vColor = color;",
" gl_Position = modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var cubeSeaFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"varying vec4 vColor;",
"varying vec3 vLight;",
"void main() {",
//" gl_FragColor = vec4(vLight, 1.0) * texture2D(diffuse, vTexCoord);",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
//" gl_FragColor = vec4(0.5, 0.9, 0.5, 1.0);",
//" gl_FragColor = vColor;",
"}",
].join("\n");
// Used when we want to stress the GPU a bit more.
// Stolen with love from https://www.clicktorelease.com/code/codevember-2016/4/
var heavyCubeSeaFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"varying vec3 vLight;",
"vec2 dimensions = vec2(64, 64);",
"float seed = 0.42;",
"vec2 hash( vec2 p ) {",
" p=vec2(dot(p,vec2(127.1,311.7)),dot(p,vec2(269.5,183.3)));",
" return fract(sin(p)*18.5453);",
"}",
"vec3 hash3( vec2 p ) {",
" vec3 q = vec3( dot(p,vec2(127.1,311.7)),",
" dot(p,vec2(269.5,183.3)),",
" dot(p,vec2(419.2,371.9)) );",
" return fract(sin(q)*43758.5453);",
"}",
"float iqnoise( in vec2 x, float u, float v ) {",
" vec2 p = floor(x);",
" vec2 f = fract(x);",
" float k = 1.0+63.0*pow(1.0-v,4.0);",
" float va = 0.0;",
" float wt = 0.0;",
" for( int j=-2; j<=2; j++ )",
" for( int i=-2; i<=2; i++ ) {",
" vec2 g = vec2( float(i),float(j) );",
" vec3 o = hash3( p + g )*vec3(u,u,1.0);",
" vec2 r = g - f + o.xy;",
" float d = dot(r,r);",
" float ww = pow( 1.0-smoothstep(0.0,1.414,sqrt(d)), k );",
" va += o.z*ww;",
" wt += ww;",
" }",
" return va/wt;",
"}",
"// return distance, and cell id",
"vec2 voronoi( in vec2 x ) {",
" vec2 n = floor( x );",
" vec2 f = fract( x );",
" vec3 m = vec3( 8.0 );",
" for( int j=-1; j<=1; j++ )",
" for( int i=-1; i<=1; i++ ) {",
" vec2 g = vec2( float(i), float(j) );",
" vec2 o = hash( n + g );",
" vec2 r = g - f + (0.5+0.5*sin(seed+6.2831*o));",
" float d = dot( r, r );",
" if( d<m.x )",
" m = vec3( d, o );",
" }",
" return vec2( sqrt(m.x), m.y+m.z );",
"}",
"void main() {",
" vec2 uv = ( vTexCoord );",
" uv *= vec2( 10., 10. );",
" uv += seed;",
" vec2 p = 0.5 - 0.5*sin( 0.*vec2(1.01,1.71) );",
" vec2 c = voronoi( uv );",
" vec3 col = vec3( c.y / 2. );",
" float f = iqnoise( 1. * uv + c.y, p.x, p.y );",
" col *= 1.0 + .25 * vec3( f );",
" gl_FragColor = vec4(vLight, 1.0) * texture2D(diffuse, vTexCoord) * vec4( col, 1. );",
"}"
].join("\n");
var CubeSea = function (gl, texture, gridSize, cubeScale, heavy, halfOnly, autorotate) {
this.gl = gl;
if (!gridSize) {
gridSize = 10;
}
this.statsMat = mat4.create();
this.normalMat = mat3.create();
this.heroRotationMat = mat4.create();
this.heroModelViewMat = mat4.create();
this.autoRotationMat = mat4.create();
this.cubesModelViewMat = mat4.create();
this.texture = texture;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(cubeSeaVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(heavy ? heavyCubeSeaFS : cubeSeaFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1,
color: 2,
normal: 3
});
this.program.link();
this.autorotate = autorotate;
var cubeVerts = [];
var cubeIndices = [];
// Build a single cube.
function appendCube (x, y, z, size) {
if (!size) size = 0.2;
if (cubeScale) size *= cubeScale;
// Bottom
var idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
// X Y Z U V R G B A NX NY NZ
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, -1.0, 0.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, -1.0, 0.0);
// Top
idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0);
// Left
idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, -1.0, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z - size, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, -1.0, 0.0, 0.0);
cubeVerts.push(x - size, y + size, z + size, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0);
cubeVerts.push(x - size, y - size, z + size, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0);
// Right
idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x + size, y - size, z - size, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0);
cubeVerts.push(x + size, y + size, z - size, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0);
cubeVerts.push(x + size, y + size, z + size, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0);
cubeVerts.push(x + size, y - size, z + size, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0);
// Back
idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 2, idx + 1);
cubeIndices.push(idx, idx + 3, idx + 2);
cubeVerts.push(x - size, y - size, z - size, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, -1.0);
cubeVerts.push(x + size, y - size, z - size, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, -1.0);
cubeVerts.push(x + size, y + size, z - size, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, -1.0);
cubeVerts.push(x - size, y + size, z - size, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, -1.0);
// Front
idx = cubeVerts.length / 12.0;
cubeIndices.push(idx, idx + 1, idx + 2);
cubeIndices.push(idx, idx + 2, idx + 3);
cubeVerts.push(x - size, y - size, z + size, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0);
cubeVerts.push(x + size, y - size, z + size, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0);
cubeVerts.push(x + size, y + size, z + size, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0);
cubeVerts.push(x - size, y + size, z + size, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0);
}
// Build the cube sea
// Ring of large cubes around the origin (disabled).
//var N = 10;
//for (var i = 0; i < N; i++) {
//  var theta = 2 * Math.PI * i / N;
//  appendCube(5 * Math.cos(theta), 0, 5 * Math.sin(theta), 1.0);
//}
appendCube(0, 0, 0, 0.5);
// Original cube-sea grid (disabled in favor of the single center cube above).
//for (var x = 0; x < gridSize; ++x) {
//  for (var y = 0; y < gridSize; ++y) {
//    for (var z = 0; z < gridSize; ++z) {
//      appendCube(x - (gridSize / 2), y - (gridSize / 2), z - (gridSize / 2));
//    }
//  }
//}
this.indexCount = cubeIndices.length;
// Add some "hero cubes" for separate animation.
this.heroOffset = cubeIndices.length;
appendCube(0, 0.25, -0.8, 0.05);
appendCube(0.8, 0.25, 0, 0.05);
appendCube(0, 0.25, 0.8, 0.05);
appendCube(-0.8, 0.25, 0, 0.05);
this.heroCount = cubeIndices.length - this.heroOffset;
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeIndices), gl.STATIC_DRAW);
this.boardVert = [];
var boardSize = 20.0;
// left top
// X Y Z U V R G B A NX NY NZ
this.boardVert.push(0.0, 10.0, -boardSize, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
// left bottom
this.boardVert.push(0.0, 0.0, -boardSize, 0.0, 0.5, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
// right bottom
this.boardVert.push(0.0, 0.0, boardSize, 1.0, 0.5, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
// left top
// X Y Z U V R G B A NX NY NZ
this.boardVert.push(0.0, 10.0, -boardSize, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
// right bottom
this.boardVert.push(0.0, 0.0, boardSize, 1.0, 0.5, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
// right top
this.boardVert.push(0.0, 10.0, boardSize, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, -1.0, 0.0);
this.boardVertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.boardVertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(this.boardVert), gl.STATIC_DRAW);
this.textureCanvas = document.createElement("canvas");
this.textureCanvas.width = 256;
this.textureCanvas.height = 256;
this.canvasTexture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, this.canvasTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, this.textureCanvas);
};
CubeSea.prototype.render = function (projectionMat, modelViewMat, stats, timestamp, orientation) {
var gl = this.gl;
var program = this.program;
program.use();
//if (this.autorotate && timestamp) {
// mat4.fromRotation(this.autoRotationMat, timestamp / 500, [0, -1, 0]);
// mat4.multiply(this.cubesModelViewMat, modelViewMat, this.autoRotationMat);
// mat3.fromMat4(this.normalMat, this.autoRotationMat);
//} else {
this.cubesModelViewMat = modelViewMat;
mat3.identity(this.normalMat);
//}
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix3fv(program.uniform.normalMat, false, this.normalMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.enableVertexAttribArray(program.attrib.color);
gl.enableVertexAttribArray(program.attrib.normal);
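// 48-byte stride = 12 floats per vertex: position(3), texCoord(2), color(4), normal(3).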
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 48, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 48, 12);
gl.vertexAttribPointer(program.attrib.color, 4, gl.FLOAT, false, 48, 20);
gl.vertexAttribPointer(program.attrib.normal, 3, gl.FLOAT, false, 48, 36);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
var N = 10;
for(var i = 0; i < N; i++){
var theta = 2 * Math.PI * i / N;
var mm = mat4.create();
mat4.translate(mm, mm, [5 * Math.cos(theta), 0, 5* Math.sin(theta)]);
//var tran = mat4.clone(mm); // (unused)
mat4.mul(mm, modelViewMat, mm);
mat4.mul(mm, projectionMat, mm);
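// mm now holds projection * view * model; the vertex shader applies it verbatim.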
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, mm);
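// Debug: dump the transform chain for the first cube every frame.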
if(i == 0){
console.log(theta);
console.log("rotate:");
console.log(orientation);
console.log(modelViewMat);
//console.log(projectionMat);
console.log(mm);
}
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
}
var mm = mat4.create();
mat4.translate(mm, mm, [20, 3, 0]);
var rot = mat4.create();
mat4.rotate(rot, rot, Math.PI / 2, [0, 1, 0]);
mat4.mul(mm, rot, mm);
mat4.mul(mm, modelViewMat, mm);
mat4.mul(mm, projectionMat, mm);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, mm);
var context = this.textureCanvas.getContext("2d");
context.save();
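// fillRect uses the default (black) fillStyle here, clearing the canvas before the white text.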
context.fillRect(0, 0, this.textureCanvas.width, this.textureCanvas.height);
context.fillStyle = '#FFFFFF';
context.font = "bold 20px 'Arial'";
context.textAlign = 'left';
context.textBaseline = 'middle';
if(orientation){
context.fillText(orientation[0] + "", 0, 20);
context.fillText(orientation[1] + "", 0, 40);
context.fillText(orientation[2] + "", 0, 60);
context.fillText(orientation[3] + "", 0, 80);
}else{
context.fillText("Hello world!", 0, 40);
}
context.restore();
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.canvasTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, this.textureCanvas);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindBuffer(gl.ARRAY_BUFFER, this.boardVertBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.enableVertexAttribArray(program.attrib.color);
gl.enableVertexAttribArray(program.attrib.normal);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 48, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 48, 12);
gl.vertexAttribPointer(program.attrib.color, 4, gl.FLOAT, false, 48, 20);
gl.vertexAttribPointer(program.attrib.normal, 3, gl.FLOAT, false, 48, 36);
gl.drawArrays(gl.TRIANGLES, 0, this.boardVert.length / 12);
if (timestamp) {
mat4.fromRotation(this.heroRotationMat, timestamp / 2000, [0, 1, 0]);
mat4.multiply(this.heroModelViewMat, modelViewMat, this.heroRotationMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, this.heroModelViewMat);
// We know that the additional model matrix is a pure rotation,
// so we can just use the non-position parts of the matrix
// directly; this is cheaper than the transpose+inverse that
// normalFromMat4 would do.
mat3.fromMat4(this.normalMat, this.heroRotationMat);
gl.uniformMatrix3fv(program.uniform.normalMat, false, this.normalMat);
//gl.drawElements(gl.TRIANGLES, this.heroCount, gl.UNSIGNED_SHORT, this.heroOffset * 2);
}
if (stats) {
// To ensure that the FPS counter is visible in VR mode we have to
// render it as part of the scene.
mat4.fromTranslation(this.statsMat, [0, -0.3, -0.5]);
mat4.scale(this.statsMat, this.statsMat, [0.3, 0.3, 0.3]);
mat4.rotateX(this.statsMat, this.statsMat, -0.75);
mat4.multiply(this.statsMat, modelViewMat, this.statsMat);
//stats.render(projectionMat, this.statsMat);
}
};
return CubeSea;
})();

View File

@ -0,0 +1,243 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* global mat4, WGLUProgram */
window.VRPanorama = (function () {
"use strict";
var panoVS = [
"uniform mat4 projectionMat;",
"uniform mat4 modelViewMat;",
"attribute vec3 position;",
"attribute vec2 texCoord;",
"varying vec2 vTexCoord;",
"void main() {",
" vTexCoord = texCoord;",
" gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );",
"}",
].join("\n");
var panoFS = [
"precision mediump float;",
"uniform sampler2D diffuse;",
"varying vec2 vTexCoord;",
"void main() {",
" gl_FragColor = texture2D(diffuse, vTexCoord);",
"}",
].join("\n");
var Panorama = function (gl) {
this.gl = gl;
this.texture = gl.createTexture();
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(panoVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(panoFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
var panoVerts = [];
var panoIndices = [];
var radius = 2; // 2 meter radius sphere
var latSegments = 40;
var lonSegments = 40;
// Create the vertices
for (var i=0; i <= latSegments; ++i) {
var theta = i * Math.PI / latSegments;
var sinTheta = Math.sin(theta);
var cosTheta = Math.cos(theta);
for (var j=0; j <= lonSegments; ++j) {
var phi = j * 2 * Math.PI / lonSegments;
var sinPhi = Math.sin(phi);
var cosPhi = Math.cos(phi);
var x = sinPhi * sinTheta;
var y = cosTheta;
var z = -cosPhi * sinTheta;
var u = (j / lonSegments);
var v = (i / latSegments);
panoVerts.push(x * radius, y * radius, z * radius, u, v);
}
}
// Create the indices
for (var i = 0; i < latSegments; ++i) {
var offset0 = i * (lonSegments+1);
var offset1 = (i+1) * (lonSegments+1);
for (var j = 0; j < lonSegments; ++j) {
var index0 = offset0+j;
var index1 = offset1+j;
panoIndices.push(
index0, index1, index0+1,
index1, index1+1, index0+1
);
}
}
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(panoVerts), gl.STATIC_DRAW);
this.indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(panoIndices), gl.STATIC_DRAW);
this.indexCount = panoIndices.length;
this.imgElement = null;
this.videoElement = null;
};
Panorama.prototype.setImage = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var img = new Image();
img.addEventListener('load', function() {
self.imgElement = img;
self.videoElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, img);
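// LINEAR filtering + CLAMP_TO_EDGE allow non-power-of-two panorama images in WebGL1.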
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.imgElement);
});
img.addEventListener('error', function(ev) {
console.error(ev.message);
reject(ev.message);
}, false);
img.crossOrigin = 'anonymous';
img.src = url;
});
};
Panorama.prototype.setVideo = function (url) {
var gl = this.gl;
var self = this;
return new Promise(function(resolve, reject) {
var video = document.createElement('video');
video.addEventListener('canplay', function() {
// Added "click to play" UI?
});
video.addEventListener('playing', function() {
self.videoElement = video;
self.imgElement = null;
gl.bindTexture(gl.TEXTURE_2D, self.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, self.videoElement);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
resolve(self.videoElement);
});
video.addEventListener('error', function(ev) {
console.error(video.error);
reject(video.error);
}, false);
// Videos must be muted to play without a user gesture.
video.muted = true;
// These lines are required to play the video on iOS.
video.setAttribute("playsinline", "");
// This is for iOS 8 and 9 only, above line required for 10+.
video.setAttribute("webkit-playsinline", "");
video.loop = true;
video.crossOrigin = 'anonymous';
video.src = url;
// As the video is never visible on the page, we must explicitly
// call play to start the video instead of being able to use
// autoplay attributes.
playVideo(video);
});
};
// Start the video. If the video fails to start, alert the user.
Panorama.prototype.play = function() {
if (this.videoElement)
playVideo(this.videoElement);
};
function playVideo(video) {
let promise = video.play();
if(promise) {
promise.catch((err) => {
console.error(err);
VRSamplesUtil.addError("Video has failed to start", 3000);
});
} else {
console.error("videoElement.play does not support promise api");
}
}
Panorama.prototype.pause = function() {
if (this.videoElement)
this.videoElement.pause();
};
Panorama.prototype.isPaused = function() {
if (this.videoElement)
return this.videoElement.paused;
return false;
};
Panorama.prototype.render = function (projectionMat, modelViewMat) {
var gl = this.gl;
var program = this.program;
if (!this.imgElement && !this.videoElement)
return;
program.use();
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, modelViewMat);
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indexBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
if (this.videoElement && !this.videoElement.paused) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.videoElement);
}
gl.drawElements(gl.TRIANGLES, this.indexCount, gl.UNSIGNED_SHORT, 0);
};
return Panorama;
})();

View File

@ -0,0 +1,227 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
window.VRSamplesUtil = (function () {
"use strict";
// Lifted from the WebVR Polyfill
function isMobile () {
return /Android/i.test(navigator.userAgent) ||
/iPhone|iPad|iPod/i.test(navigator.userAgent);
}
function getMessageContainer () {
var messageContainer = document.getElementById("vr-sample-message-container");
if (!messageContainer) {
messageContainer = document.createElement("div");
messageContainer.id = "vr-sample-message-container";
messageContainer.style.fontFamily = "sans-serif";
messageContainer.style.position = "absolute";
messageContainer.style.zIndex = "999";
messageContainer.style.left = "0";
messageContainer.style.top = "0";
messageContainer.style.right = "0";
messageContainer.style.margin = "0";
messageContainer.style.padding = "0";
messageContainer.align = "center";
document.body.appendChild(messageContainer);
}
return messageContainer;
}
function addMessageElement (message, backgroundColor) {
var messageElement = document.createElement("div");
messageElement.classList.add("vr-sample-message");
messageElement.style.color = "#FFF";
messageElement.style.backgroundColor = backgroundColor;
messageElement.style.borderRadius = "3px";
messageElement.style.position = "relative";
messageElement.style.display = "inline-block";
messageElement.style.margin = "0.5em";
messageElement.style.padding = "0.75em";
messageElement.innerHTML = message;
getMessageContainer().appendChild(messageElement);
return messageElement;
}
// Makes the given element fade out and remove itself from the DOM after the
// given timeout.
function makeToast (element, timeout) {
element.style.transition = "opacity 0.5s ease-in-out";
element.style.opacity = "1";
setTimeout(function () {
element.style.opacity = "0";
setTimeout(function () {
if (element.parentElement)
element.parentElement.removeChild(element);
}, 500);
}, timeout);
}
function addError (message, timeout) {
var element = addMessageElement("<b>ERROR:</b> " + message, "#D33");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function addInfo (message, timeout) {
var element = addMessageElement(message, "#22A");
if (timeout) {
makeToast(element, timeout);
}
return element;
}
function getButtonContainer () {
var buttonContainer = document.getElementById("vr-sample-button-container");
if (!buttonContainer) {
buttonContainer = document.createElement("div");
buttonContainer.id = "vr-sample-button-container";
buttonContainer.style.fontFamily = "sans-serif";
buttonContainer.style.position = "absolute";
buttonContainer.style.zIndex = "999";
buttonContainer.style.left = "0";
buttonContainer.style.bottom = "0";
buttonContainer.style.right = "0";
buttonContainer.style.margin = "0";
buttonContainer.style.padding = "0";
buttonContainer.align = "right";
document.body.appendChild(buttonContainer);
}
return buttonContainer;
}
function addButtonElement (message, key, icon) {
var buttonElement = document.createElement("div");
buttonElement.classList.add("vr-sample-button");
buttonElement.style.color = "#FFF";
buttonElement.style.fontWeight = "bold";
buttonElement.style.backgroundColor = "#888";
buttonElement.style.borderRadius = "5px";
buttonElement.style.border = "3px solid #555";
buttonElement.style.position = "relative";
buttonElement.style.display = "inline-block";
buttonElement.style.margin = "0.5em";
buttonElement.style.padding = "0.75em";
buttonElement.style.cursor = "pointer";
buttonElement.align = "center";
if (icon) {
buttonElement.innerHTML = "<img src='" + icon + "'/><br/>" + message;
} else {
buttonElement.innerHTML = message;
}
if (key) {
var keyElement = document.createElement("span");
keyElement.classList.add("vr-sample-button-accelerator");
keyElement.style.fontSize = "0.75em";
keyElement.style.fontStyle = "italic";
keyElement.innerHTML = " (" + key + ")";
buttonElement.appendChild(keyElement);
}
getButtonContainer().appendChild(buttonElement);
return buttonElement;
}
function addButton (message, key, icon, callback) {
var keyListener = null;
if (key) {
var keyCode = key.charCodeAt(0);
keyListener = function (event) {
if (event.keyCode === keyCode) {
callback(event);
}
};
document.addEventListener("keydown", keyListener, false);
}
var element = addButtonElement(message, key, icon);
element.addEventListener("click", function (event) {
callback(event);
event.preventDefault();
}, false);
return {
element: element,
keyListener: keyListener
};
}
function removeButton (button) {
if (!button)
return;
if (button.element.parentElement)
button.element.parentElement.removeChild(button.element);
if (button.keyListener)
document.removeEventListener("keydown", button.keyListener, false);
}
function addVRClickListener(clickCallback) {
let lastButtonState = [];
let presentingDisplay = null;
// Set up a loop to check gamepad state while any VRDisplay is presenting.
function onClickListenerFrame() {
// Only reschedule the loop if a display is still presenting.
if (presentingDisplay && presentingDisplay.isPresenting) {
presentingDisplay.requestAnimationFrame(onClickListenerFrame);
}
let gamepads = navigator.getGamepads();
for (let i = 0; i < gamepads.length; ++i) {
let gamepad = gamepads[i];
// Ensure the gamepad is valid and has buttons.
if (gamepad &&
gamepad.buttons.length) {
let lastState = lastButtonState[i] || false;
let newState = gamepad.buttons[0].pressed;
// If the primary button state has changed from not pressed to pressed
// over the last frame then fire the callback.
if (newState && !lastState) {
clickCallback(gamepad);
}
lastButtonState[i] = newState;
}
}
}
window.addEventListener('vrdisplaypresentchange', (event) => {
// When using the polyfill, CustomEvents require event properties to
// be attached to the `detail` property; native implementations
// are able to attach `display` directly on the event.
var display = event.detail ? event.detail.display : event.display;
if (display.isPresenting) {
let scheduleFrame = !presentingDisplay;
presentingDisplay = display;
if (scheduleFrame)
onClickListenerFrame();
} else if (presentingDisplay == display) {
presentingDisplay = null;
}
});
}
return {
isMobile: isMobile,
addError: addError,
addInfo: addInfo,
addButton: addButton,
removeButton: removeButton,
makeToast: makeToast,
addVRClickListener: addVRClickListener
};
})();

test/webvr/js/vr-splash.js (new file, 103 lines)
View File

@ -0,0 +1,103 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
class VRSplashScreen {
constructor(gl, texture, stereo) {
const splashVS = `
uniform mat4 projectionMat;
uniform mat4 modelViewMat;
uniform vec4 texCoordScaleOffset;
attribute vec3 position;
attribute vec2 texCoord;
varying vec2 vTexCoord;
void main() {
vTexCoord = (texCoord * texCoordScaleOffset.xy) + texCoordScaleOffset.zw;
gl_Position = projectionMat * modelViewMat * vec4( position, 1.0 );
}
`;
const splashFS = `
precision mediump float;
uniform sampler2D diffuse;
varying vec2 vTexCoord;
void main() {
gl_FragColor = texture2D(diffuse, vTexCoord);
}
`;
this.gl = gl;
this.modelViewMat = mat4.create();
this.texture = texture;
this.stereo = stereo;
this.program = new WGLUProgram(gl);
this.program.attachShaderSource(splashVS, gl.VERTEX_SHADER);
this.program.attachShaderSource(splashFS, gl.FRAGMENT_SHADER);
this.program.bindAttribLocation({
position: 0,
texCoord: 1
});
this.program.link();
let splashVerts = [];
let size = 0.4;
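// Quad built from two triangles, rendered 1 meter in front of the viewer (see render()).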
// X Y Z U V
splashVerts.push(-size, -size, 0.0, 0.0, 1.0);
splashVerts.push( size, -size, 0.0, 1.0, 1.0);
splashVerts.push( size, size, 0.0, 1.0, 0.0);
splashVerts.push(-size, -size, 0.0, 0.0, 1.0);
splashVerts.push( size, size, 0.0, 1.0, 0.0);
splashVerts.push(-size, size, 0.0, 0.0, 0.0);
this.vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(splashVerts), gl.STATIC_DRAW);
}
render(projectionMat, eye) {
var gl = this.gl;
var program = this.program;
program.use();
// We're going to just completely ignore the view matrix in this case,
// because we want to render directly in front of the user's face no matter
// where they are looking.
mat4.identity(this.modelViewMat);
mat4.translate(this.modelViewMat, this.modelViewMat, [0, 0, -1]);
gl.uniformMatrix4fv(program.uniform.projectionMat, false, projectionMat);
gl.uniformMatrix4fv(program.uniform.modelViewMat, false, this.modelViewMat);
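// For side-by-side stereo sources, scale/offset the UVs to sample only the matching half.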
if (this.stereo) {
if (eye == "left") {
gl.uniform4f(program.uniform.texCoordScaleOffset, 0.5, 1.0, 0.0, 0.0);
} else if (eye == "right") {
gl.uniform4f(program.uniform.texCoordScaleOffset, 0.5, 1.0, 0.5, 0.0);
}
} else {
gl.uniform4f(program.uniform.texCoordScaleOffset, 1.0, 1.0, 0.0, 0.0);
}
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertBuffer);
gl.enableVertexAttribArray(program.attrib.position);
gl.enableVertexAttribArray(program.attrib.texCoord);
gl.vertexAttribPointer(program.attrib.position, 3, gl.FLOAT, false, 20, 0);
gl.vertexAttribPointer(program.attrib.texCoord, 2, gl.FLOAT, false, 20, 12);
gl.activeTexture(gl.TEXTURE0);
gl.uniform1i(this.program.uniform.diffuse, 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
}

Binary file not shown. (new image, 788 B)

Binary file not shown. (new image, 51 KiB)

Binary file not shown. (new image, 84 KiB)