Display live video from bytes coming over a websocket in Flutter
I am working on a project where I want to display live video from an external camera in a Flutter app, and I am trying to achieve this with websockets.
Here is the code for the websocket server, written in Python:
import websockets
import asyncio
import mediapipe as mp
import cv2, struct, pickle, base64

mp_face_detection = mp.solutions.face_detection
mp_draw = mp.solutions.drawing_utils
face_detection = mp_face_detection.FaceDetection(min_detection_confidence=0.7)

port = 5000

def draw_bbox(res, frame):
    for id, det in enumerate(res.detections):
        # mp_draw.draw_detection(frame, det)  #? Direct method for drawing the bounding box and feature points
        coord = det.location_data.relative_bounding_box
        ih, iw, ic = frame.shape
        bbox = int(coord.xmin * iw), int(coord.ymin * ih), \
               int(coord.width * iw), int(coord.height * ih)
        cv2.rectangle(frame, bbox, (255, 0, 255), 2)
        cv2.putText(
            frame,
            f'{int(det.score[0]*100)}%',
            (bbox[0], bbox[1] - 20),
            cv2.FONT_HERSHEY_PLAIN,
            2,
            (0, 255, 0),
            2
        )

print("Started server on port : ", port)

async def transmit(websocket, path):
    print("Client Connected !")
    try:
        cap = cv2.VideoCapture(0)
        while cap.isOpened():
            img, frame = cap.read()
            rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            res = face_detection.process(rgb)
            if res.detections:
                draw_bbox(res, frame)
            a = pickle.dumps(frame)
            msg = struct.pack("Q", len(a)) + a  # 8-byte length header followed by the pickled frame
            await websocket.send(msg)
            # cv2.imshow("Transmission", frame)
            # if cv2.waitKey(1) & 0xFF == ord('q'):
            #     break
    except websockets.connection.ConnectionClosed as e:
        print("Client Disconnected !")
        cap.release()
    # except:
    #     print("Something went Wrong !")

start_server = websockets.serve(transmit, port=port)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
cap.release()
This works fine and sends the image frames, encoded as bytes, to other Python clients.
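For reference, a minimal Python client that consumes this stream could look like the sketch below. It only illustrates the framing the server uses (an 8-byte struct length header followed by the pickled frame); the URL placeholder has to be replaced with the server machine's IP.

import asyncio
import pickle
import struct

import cv2
import websockets

async def receive():
    # Connect to the server above; fill in the placeholder IP.
    async with websockets.connect("ws://<network_ipv4>:5000") as ws:
        while True:
            msg = await ws.recv()                     # one websocket message per frame
            size = struct.unpack("Q", msg[:8])[0]     # 8-byte length header written by the server
            frame = pickle.loads(msg[8:8 + size])     # recover the numpy array (BGR image)
            cv2.imshow("Receiver", frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

asyncio.run(receive())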
Here is the Flutter code where I want to read and display these frames in real time:
class _MainPageState extends State<MainPage> {
  static const String url = "ws://<network_ipv4>:5000";
  WebSocketChannel _channel = WebSocketChannel.connect(Uri.parse(url));

  void _connectToWebsocket() {
    _channel = WebSocketChannel.connect(Uri.parse(url));
  }

  void _disconnectToWebsocket() {
    _channel.sink.close();
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text("Live Video"),
        ),
        body: Padding(
          padding: const EdgeInsets.all(20.0),
          child: Column(
            children: [
              Row(
                children: [
                  ElevatedButton(
                      onPressed: _connectToWebsocket,
                      child: const Text("Force Connection")),
                  const SizedBox(
                    width: 120.0,
                  ),
                  ElevatedButton(
                      onPressed: _disconnectToWebsocket,
                      child: const Text("Disconnect")),
                ],
              ),
              StreamBuilder(
                stream: _channel.stream,
                builder: (context, snapshot) {
                  return snapshot.hasData
                      ? Image.memory(
                          base64Decode(snapshot.data.toString()),
                        )
                      : const Center(
                          child: Text("No Data"),
                        );
                },
              )
            ],
          ),
        ),
      ),
    );
  }
}
Please help me out.
I have solved the problem. The issue was that after the frame is base64-encoded, stringifying the resulting bytes object in Python wraps the text in a b'...' literal, which the Dart side then fails to decode. The snippet below shows the difference; the working server code follows.
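To make this concrete, here is what happens in Python (the example input is just the first few bytes of a JPEG). Calling .decode('ascii') on the encoded bytes is an equivalent alternative to the string slicing used in the server code below:

import base64

payload = base64.b64encode(b'\xff\xd8\xff')  # example input: the first bytes of a JPEG
print(str(payload))                          # "b'/9j/'"  -> the b'...' wrapper breaks base64Decode in Dart
print(payload.decode('ascii'))               # "/9j/"     -> plain base64 text, safe to send as-is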
import websockets
import asyncio
import cv2, base64

port = 5000  # port the server listens on (the client connects to ws://<network_ipv4>:5000)

print("Started server on port : ", port)

async def transmit(websocket, path):
    print("Client Connected !")
    try:
        cap = cv2.VideoCapture(0)
        while cap.isOpened():
            _, frame = cap.read()
            encoded = cv2.imencode('.jpg', frame)[1]
            data = str(base64.b64encode(encoded))
            data = data[2:len(data) - 1]  # strip the leading "b'" and trailing "'" of the bytes repr
            await websocket.send(data)
            cv2.imshow("Transmission", frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    except websockets.connection.ConnectionClosed as e:
        print("Client Disconnected !")

start_server = websockets.serve(transmit, port=port)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
And here is the Flutter client code, in case anyone wants to build a similar app:
import 'dart:convert';
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:patrolling_robot/src/styles/styles.dart';
import 'package:web_socket_channel/io.dart';
import 'package:web_socket_channel/web_socket_channel.dart';

class MainPage extends StatefulWidget {
  const MainPage({Key? key}) : super(key: key);

  @override
  State<MainPage> createState() => _MainPageState();
}

class _MainPageState extends State<MainPage> {
  static const String url = "ws://<network_ipv4>:5000";
  WebSocketChannel? _channel;
  bool _isConnected = false;

  void connect() {
    _channel = IOWebSocketChannel.connect(Uri.parse(url));
    setState(() {
      _isConnected = true;
    });
  }

  void disconnect() {
    _channel!.sink.close();
    setState(() {
      _isConnected = false;
    });
  }

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      darkTheme: ThemeData(brightness: Brightness.dark),
      themeMode: ThemeMode.dark,
      home: Scaffold(
        appBar: AppBar(
          title: const Text("Live Video"),
        ),
        body: Padding(
          padding: const EdgeInsets.all(20.0),
          child: Center(
            child: Column(
              children: [
                Row(
                  mainAxisAlignment: MainAxisAlignment.spaceBetween,
                  children: [
                    ElevatedButton(
                      onPressed: connect,
                      style: buttonStyle,
                      child: const Text("Connect"),
                    ),
                    ElevatedButton(
                      onPressed: disconnect,
                      style: buttonStyle,
                      child: const Text("Disconnect"),
                    ),
                  ],
                ),
                const SizedBox(
                  height: 50.0,
                ),
                _isConnected
                    ? StreamBuilder(
                        stream: _channel!.stream,
                        builder: (context, snapshot) {
                          if (!snapshot.hasData) {
                            return const CircularProgressIndicator();
                          }
                          if (snapshot.connectionState == ConnectionState.done) {
                            return const Center(
                              child: Text("Connection Closed !"),
                            );
                          }
                          //? Working for single frames
                          return Image.memory(
                            Uint8List.fromList(
                              base64Decode(
                                (snapshot.data.toString()),
                              ),
                            ),
                            gaplessPlayback: true,
                          );
                        },
                      )
                    : const Text("Initiate Connection")
              ],
            ),
          ),
        ),
      ),
    );
  }
}
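For anyone who wants to sanity-check the stream without building the Flutter app, a small Python viewer along the lines of the sketch below should work against the server above (the URL placeholder has to be replaced with your machine's IP; this is an extra convenience, not part of the original project):

import asyncio
import base64

import cv2
import numpy as np
import websockets

async def preview():
    # Same endpoint the Flutter client uses; fill in the placeholder IP.
    async with websockets.connect("ws://<network_ipv4>:5000") as ws:
        while True:
            data = await ws.recv()                            # base64 text, one JPEG per message
            jpg = np.frombuffer(base64.b64decode(data), dtype=np.uint8)
            frame = cv2.imdecode(jpg, cv2.IMREAD_COLOR)       # decode the JPEG back to a BGR image
            cv2.imshow("Preview", frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

asyncio.run(preview())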