|
// deviceManager.js
|
// Module-level cache of the most recent successful enumeration.
let cachedVideoDevices = null;

/**
 * Enumerate the available video input (camera) devices.
 *
 * Results are cached at module level; pass `forceRefresh = true` to bypass
 * the cache and query the browser again. The cache is also invalidated
 * elsewhere in this module when the device set changes.
 *
 * @param {boolean} [forceRefresh=false] - Ignore any cached result.
 * @returns {Promise<Array<{label: string, id: string, groupId: string}>>}
 * @throws {Error} When the MediaDevices API is unavailable, or when
 *   enumeration itself rejects (the error is logged, then rethrown).
 */
export const getVideoDevices = async (forceRefresh = false) => {
  if (!forceRefresh && cachedVideoDevices) {
    return cachedVideoDevices;
  }

  try {
    if (!navigator.mediaDevices?.enumerateDevices) {
      throw new Error('MediaDevices API not supported');
    }

    const allDevices = await navigator.mediaDevices.enumerateDevices();
    const cameras = [];
    for (const device of allDevices) {
      if (device.kind !== "videoinput") continue;
      cameras.push({
        label: device.label,
        id: device.deviceId,
        groupId: device.groupId, // groupId links devices sharing physical hardware
      });
    }

    cachedVideoDevices = cameras;
    return cachedVideoDevices;
  } catch (err) {
    console.error('Error enumerating devices:', err);
    throw err;
  }
};
|
|
// Invalidate the device cache whenever the set of media devices changes
// (e.g. a camera is plugged in or unplugged), so the next call to
// getVideoDevices() re-enumerates instead of serving stale data.
if (navigator.mediaDevices?.addEventListener) {
  navigator.mediaDevices.addEventListener('devicechange', () => {
    cachedVideoDevices = null; // drop the cache
    // A custom event or callback could be dispatched here to notify listeners.
  });
}
|
|
/**
 * Stop every track of the MediaStream attached to a <video> element and
 * detach the stream from the element.
 *
 * The stop is delayed (default 500 ms) — presumably to let a final frame
 * render before the camera is released; TODO(review): confirm why the delay
 * exists. Unlike the previous implementation, which fired the stop in a
 * floating setTimeout and resolved immediately, the returned promise now
 * resolves only AFTER the tracks have been stopped, so callers may `await`
 * it to know the camera is actually off.
 *
 * @param {HTMLVideoElement|null|undefined} video - Element whose srcObject
 *   holds the stream; a nullish video or stream is a silent no-op.
 * @param {number} [delayMs=500] - Milliseconds to wait before stopping.
 * @returns {Promise<void>}
 */
export const stopCamera = async (video, delayMs = 500) => {
  if (video == null) return;
  const stream = video.srcObject;
  if (stream == null) return;

  await new Promise((resolve) => setTimeout(resolve, delayMs));

  for (const track of stream.getTracks()) {
    track.stop();
  }
  video.srcObject = null;
};
|
|
/**
 * Stop all tracks of a MediaStream. A falsy stream is a silent no-op.
 *
 * @param {MediaStream|null|undefined} stream
 * @returns {Promise<void>}
 */
export const stopStream = async (stream) => {
  if (!stream) return;
  for (const track of stream.getTracks()) {
    track.stop();
  }
};
|
|
/**
 * Draw the current video frame onto a canvas, sizing the canvas's drawing
 * buffer to its CSS layout box (offsetWidth/offsetHeight).
 *
 * @param {HTMLVideoElement} video - Source of the frame.
 * @param {HTMLCanvasElement} canvas - Destination canvas.
 */
export const drawVideoToCanvas = (video, canvas) => {
  const { offsetWidth, offsetHeight } = canvas;
  canvas.width = offsetWidth;
  canvas.height = offsetHeight;

  const ctx = canvas.getContext('2d');
  ctx.imageSmoothingEnabled = false; // keep pixels crisp when scaling
  ctx.drawImage(video, 0, 0, offsetWidth, offsetHeight);
};
|
|
/**
 * Erase the full drawing surface of a canvas.
 * Logs an error and returns if the canvas is nullish.
 *
 * @param {HTMLCanvasElement|null|undefined} canvas
 */
export const clearCanvas = (canvas) => {
  if (canvas == null) {
    console.error("Canvas element is null");
    return;
  }
  canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
};
|
|
/**
 * Capture the current frame of a video element as a Base64 JPEG data URL.
 *
 * @param {HTMLVideoElement} video - Assumes metadata is loaded so that
 *   videoWidth/videoHeight are non-zero — TODO confirm callers guarantee this.
 * @returns {string} `data:image/jpeg;base64,...` encoded at 0.8 quality.
 */
export const captureImageFromVideo = (video) => {
  // Off-screen canvas sized to the native video resolution.
  const canvas = document.createElement("canvas");
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;

  // Paint the current frame, then serialize the canvas contents.
  const ctx = canvas.getContext("2d");
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  return canvas.toDataURL("image/jpeg", 0.8);
};
|
|
/**
 * Capture the current frame of a video element as a JPEG Blob.
 *
 * @param {HTMLVideoElement} video - Assumes metadata is loaded so that
 *   videoWidth/videoHeight are non-zero — TODO confirm callers guarantee this.
 * @returns {Promise<Blob>} Resolves with whatever canvas.toBlob delivers,
 *   encoded as image/jpeg at 0.9 quality.
 */
export const captureBlobFromVideo = async (video) => {
  // Off-screen canvas sized to the native video resolution.
  const canvas = document.createElement("canvas");
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;

  const ctx = canvas.getContext("2d");
  ctx.imageSmoothingEnabled = false; // copy source pixels without resampling
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);

  // toBlob is callback-based; adapt it to a promise.
  return new Promise((resolve) => {
    canvas.toBlob(resolve, 'image/jpeg', 0.9);
  });
};
|
|
/**
 * Draw the current video frame onto a new off-screen canvas.
 *
 * NOTE(review): despite the name, this returns the 2D rendering CONTEXT,
 * not the canvas element (in the browser the canvas is reachable via
 * `ctx.canvas`). Kept as-is for caller compatibility.
 *
 * @param {HTMLVideoElement} video - Assumes metadata is loaded so that
 *   videoWidth/videoHeight are non-zero — TODO confirm callers guarantee this.
 * @returns {CanvasRenderingContext2D} Context holding the captured frame.
 */
export const createCanvasFromVideo = (video) => {
  // Off-screen canvas sized to the native video resolution.
  const canvas = document.createElement("canvas");
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;

  const ctx = canvas.getContext("2d");
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  return ctx;
};
|