Android JavaCV FFmpeg webstream to local static website
For my integration test I am working on an application that has to provide a live stream to a locally hosted website. I already have a working site running on nanohttpd. The application also does special image processing, which is why I use JavaCV. The library works fine and all the cpp bindings work as well.
My question: how do I set up a live stream that can be played directly in the static site hosted by nanohttpd? Am I on the right track?
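For context, a static page served with NanoHTTPD can look roughly like the sketch below. This is only a minimal illustration under the assumption of NanoHTTPD 2.3.x, not the actual site from the project; the class name, port and markup are made up.

import fi.iki.elonen.NanoHTTPD;

public class StaticSite extends NanoHTTPD {

    public StaticSite() {
        super(8080); // port is an assumption
    }

    @Override
    public Response serve(IHTTPSession session) {
        // Serve one static page containing the <img> element the stream will update.
        String html = "<html><body><img id=\"image\" /></body></html>";
        return newFixedLengthResponse(Response.Status.OK, "text/html", html);
    }
}

Starting it with new StaticSite().start(NanoHTTPD.SOCKET_READ_TIMEOUT, false); serves the page on port 8080.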
My code:
Initialization:
private void initLiveStream() throws FrameRecorder.Exception {
/* ~~~ https://github.com/bytedeco/javacv/issues/598 ~~~ */
frameRecorder = new FFmpegFrameRecorder("http://localhost:9090", imageWidth, imageHeight, 0);
frameRecorder.setVideoOption("preset", "ultrafast");
frameRecorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
frameRecorder.setAudioCodec(0);
frameRecorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
frameRecorder.setFormat("webm");
frameRecorder.setGopSize(10);
frameRecorder.setFrameRate(frameRate);
frameRecorder.setVideoBitrate(5000);
frameRecorder.setOption("content_type","video/webm");
frameRecorder.setOption("listen", "1");
frameRecorder.start();
}
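As a counterpart to the initialization above, the recorder also has to be stopped and released once streaming ends. A minimal sketch, assuming frameRecorder is the field initialized in initLiveStream():

private void stopLiveStream() {
    if (frameRecorder != null) {
        try {
            // stop() finalizes the stream, release() frees the native resources.
            frameRecorder.stop();
            frameRecorder.release();
        } catch (FrameRecorder.Exception e) {
            e.printStackTrace();
        } finally {
            frameRecorder = null;
        }
    }
}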
In my CameraView:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Camera.Size size = camera.getParameters().getPreviewSize();
Frame frame = new AndroidFrameConverter().convert(data, size.width, size.height);
try {
if(frameRecorder!=null){
frameRecorder.record(frame);
}
} catch (FrameRecorder.Exception e) {
e.printStackTrace();
}
}
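A side note on the callback above: AndroidFrameConverter can be reused, so allocating a new converter on every preview frame creates avoidable garbage. A hedged variant that keeps the converter in a field:

private final AndroidFrameConverter frameConverter = new AndroidFrameConverter();

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    // Reuse the same converter instead of creating one per frame.
    Frame frame = frameConverter.convert(data, size.width, size.height);
    try {
        if (frameRecorder != null) {
            frameRecorder.record(frame);
        }
    } catch (FrameRecorder.Exception e) {
        e.printStackTrace();
    }
}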
This is one of the stack traces that kept coming up while I was searching for a solution:
org.bytedeco.javacv.FrameRecorder$Exception: avio_open error() error -111: Could not open 'http://localhost:9090'
I could not find any other discussion of this specific problem.
Thanks in advance
EDIT
Thanks to Chester Cobus, here is the code I used:
WebSocket:
//Constructor
AsyncHttpServer serverStream = new AsyncHttpServer();
List<WebSocket> sockets = new ArrayList<>();
//
//I'm planning to use more sockets; this catch-all regular expression is the only one I found that matches them all.
serverStream.websocket("/((?:[^/]*/)*)(.*)", new AsyncHttpServer.WebSocketRequestCallback() {
@Override
public void onConnected(final WebSocket webSocket, AsyncHttpServerRequest request) {
String uri = request.getPath();
if (uri.equals("/live")) {
sockets.add(webSocket);
//Use this to clean up any references to your websocket
webSocket.setClosedCallback(new CompletedCallback() {
@Override
public void onCompleted(Exception ex) {
try {
if (ex != null)
Log.e("WebSocket", "Error");
} finally {
sockets.remove(webSocket);
}
}
});
}
}
});
//Updater (Observer pattern)
@Override
public void updated(byte[] data) {
for (WebSocket socket : sockets) {
socket.write(new ByteBufferList(data));
}
}
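For reference, the updated(byte[]) override above is assumed to come from a small observer interface that the recording activity reaches through MainActivity.getWebStreamer(). A minimal sketch with a hypothetical name:

//Hypothetical interface name; the actual one in the project may differ.
public interface WebStreamListener {
    //Called with a JPEG-encoded preview frame.
    void updated(byte[] data);
}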
Recording activity:
private long start_time = System.currentTimeMillis();
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
long now_time = System.currentTimeMillis();
if ((now_time - start_time) > 250) {
start_time = now_time;
//https://forums.xamarin.com/discussion/40991/onpreviewframe-issue-converting-preview-byte-to-android-graphics-bitmap
Camera.Size size = camera.getParameters().getPreviewSize();
YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, size.width, size.height), 60, byteArrayOutputStream);
MainActivity.getWebStreamer().updated(byteArrayOutputStream.toByteArray());
}
}
JavaScript
var socket;
var imageElement;
/**
* path - e.g. "ws://" + window.location.hostname + ":8090/live"
* image - HTMLImageElement
*/
function imageStreamer(path, image) {
imageElement = image;
socket = new WebSocket(path);
socket.onmessage = function(msg) {
var arrayBuffer = msg.data;
var reader = new FileReader();
reader.onload = function(e) {
imageElement.src = e.target.result;
};
reader.readAsDataURL(arrayBuffer);
};
}
Here is an example of a WebSocket implementation:
//This code must run just before Camera is opened.
AsyncHttpServer server = new AsyncHttpServer();
server.websocket("/live","ws", new WebSocketRequestCallback() {
@Override
public void onConnected(final WebSocket webSocket, AsyncHttpServerRequest request) {
liveSocket = webSocket; //store the socket in a field of the enclosing class (field name "liveSocket" assumed)
}
});
//listen on port 5000
server.listen(5000);
//browsing ws://{IP Address assigned by wifi}:5000/live
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
liveSocket.send(data);
}
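Note that onPreviewFrame delivers raw NV21 bytes, which a browser cannot render directly; each frame needs to be encoded (for example to JPEG, as the edit above does) before it is sent. A hedged sketch, reusing the assumed liveSocket field:

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Size size = camera.getParameters().getPreviewSize();
    //Compress the NV21 preview frame to JPEG so the browser can show it in an <img>.
    YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, size.width, size.height), 60, out);
    if (liveSocket != null) {
        liveSocket.send(out.toByteArray());
    }
}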
Fetch the library above with Gradle (newer Gradle versions use implementation instead of compile):
dependencies {
compile 'com.koushikdutta.async:androidasync:2.+'
}
Here is the client-side code for your website:
var socket = new WebSocket('ws://{IP Address assigned by wifi}:5000/live', 'ws'); //subprotocol matches the "ws" registered on the server
socket.onmessage = function(msg) {
var arrayBuffer = msg.data;
var image = document.getElementById('image'); //<img id="image" /> in HTML
var reader = new FileReader();
reader.onload = function(e) {
image.src = e.target.result;
};
reader.readAsDataURL(arrayBuffer);
};