前言
C#屏幕共享可以通过以下几种方式实现:
- 使用Socket通信 - 这种方式可以实现实时屏幕共享,可以将屏幕数据传输到远端的接收端,在接收端将数据还原成图像,从而实现屏幕共享的效果。
- 使用VNC协议。
- 使用桌面流媒体传输协议(RDP) - RDP是一种专门用于远程桌面的协议,可以实现高效率、低延迟的远程桌面共享。
- 使用WebRTC - WebRTC是一种Web实时通信协议,可以实现Web浏览器之间的实时通信,包括屏幕共享。
总的来说,最好的实现方式应该根据具体的场景和需求来选择,以达到最好的效果。
使用Socket通信
https://cloud.tencent.com/developer/article/2293345
这种方式画面传输效果不是很好。
使用VNC协议
目前未找到C#可用的VNCServer库。
使用RDP
这种方式要求PC开启允许远程连接,但是家庭版的系统不支持,并且连接时要求有帐号和密码,所以不推荐。
使用WebRTC
https://blog.csdn.net/xiaoYong_520/article/details/120010259
WebRTC
https://github.com/sipsorcery-org/sipsorcery
我们这里使用SIPSorcery这个库。
这个库要求项目是64位的。
安装依赖
代码语言:javascript复制Install-Package SIPSorcery
Install-Package SIPSorceryMedia.Encoders -Pre
运行报错
System.IO.FileLoadException:“未能加载文件或程序集“Microsoft.Extensions.Logging.Abstractions, Version=6.0.0.2, Culture=neutral, PublicKeyToken=adb9793829ddae60”或它的某一个依赖项。找到的程序集清单定义与程序集引用不匹配
升级版本即可
代码语言:javascript复制Install-Package Microsoft.Extensions.Logging.Abstractions -Version 7.0.0
C#代码
代码语言:javascript复制private const int WEBSOCKET_PORT = 8081;
public MainWindow()
{
InitializeComponent();
Console.WriteLine(@"Starting web socket server...");
var webSocketServer = new WebSocketServer(
IPAddress.Any,
WEBSOCKET_PORT
);
webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>(
"/",
(peer) => peer.CreatePeerConnection = CreatePeerConnection
);
webSocketServer.Start();
Console.WriteLine($@"Waiting for web socket connections on {webSocketServer.Address}:{webSocketServer.Port}...");
}
private static Task<RTCPeerConnection> CreatePeerConnection()
{
var pc = new RTCPeerConnection(null);
var screenSource = new ZVideoSource(new VpxVideoEncoder());
screenSource.SetFrameRate(30);
MediaStreamTrack videoTrack = new MediaStreamTrack(
screenSource.GetVideoSourceFormats(),
MediaStreamStatusEnum.SendOnly
);
pc.addTrack(videoTrack);
screenSource.OnVideoSourceEncodedSample = pc.SendVideo;
pc.OnVideoFormatsNegotiated = (formats) => screenSource.SetVideoSourceFormat(formats.First());
pc.onconnectionstatechange = async (state) =>
{
Console.WriteLine($@"Peer connection state change to {state}.");
switch (state)
{
case RTCPeerConnectionState.connected:
await screenSource.StartVideo();
break;
case RTCPeerConnectionState.failed:
pc.Close("ice disconnection");
break;
case RTCPeerConnectionState.closed:
await screenSource.CloseVideo();
screenSource.Dispose();
break;
}
};
return Task.FromResult(pc);
}
其中ZVideoSource是自定义的桌面采集的类
ZVideoSource.cs
代码语言:javascript复制namespace z_remote_control.Utils
{
using SIPSorceryMedia.Abstractions;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
public class ZVideoSource : IVideoSource, IDisposable
{
public static readonly List<VideoFormat> SupportedFormats = new List<VideoFormat>()
{
new VideoFormat(
VideoCodecsEnum.VP8,
96
),
new VideoFormat(
VideoCodecsEnum.H264,
100,
parameters: "packetization-mode=1"
)
};
private int _frameSpacing;
private readonly byte[] _myI420Buffer;
private readonly Timer _sendTestPatternTimer;
private bool _isStarted;
private bool _isPaused;
private bool _isClosed;
private bool _isMaxFrameRate;
private int _frameCount;
private readonly IVideoEncoder _videoEncoder;
private readonly MediaFormatManager<VideoFormat> _formatManager;
public event RawVideoSampleDelegate OnVideoSourceRawSample;
public event RawVideoSampleFasterDelegate OnVideoSourceRawSampleFaster;
public event EncodedSampleDelegate OnVideoSourceEncodedSample;
public event SourceErrorDelegate OnVideoSourceError;
private const int SCREEN_WIDTH = 1280;
private const int SCREEN_HEIGHT = 720;
public ZVideoSource(IVideoEncoder encoder = null)
{
if (encoder != null)
{
_videoEncoder = encoder;
_formatManager = new MediaFormatManager<VideoFormat>(SupportedFormats);
}
_myI420Buffer = new byte[10 * 1024 * 1024];
UpdateBuffer();
_sendTestPatternTimer = new Timer(
GenerateTestPattern,
null,
-1,
-1
);
_frameSpacing = 33;
}
private void UpdateBuffer()
{
var source = ZScreenUtils.CaptureScreen(
SCREEN_WIDTH,
SCREEN_HEIGHT
);
var i420Byte = PixelConverter.BGRtoI420(
source,
SCREEN_WIDTH,
SCREEN_HEIGHT,
SCREEN_WIDTH * 3
);
Buffer.BlockCopy(
i420Byte,
0,
_myI420Buffer,
0,
i420Byte.Length
);
}
public void RestrictFormats(Func<VideoFormat, bool> filter) => _formatManager.RestrictFormats(filter);
public List<VideoFormat> GetVideoSourceFormats() => _formatManager.GetSourceFormats();
public void SetVideoSourceFormat(VideoFormat videoFormat) => _formatManager.SetSelectedFormat(videoFormat);
public List<VideoFormat> GetVideoSinkFormats() => _formatManager.GetSourceFormats();
public void SetVideoSinkFormat(VideoFormat videoFormat) => _formatManager.SetSelectedFormat(videoFormat);
public void ForceKeyFrame() => _videoEncoder?.ForceKeyFrame();
public bool HasEncodedVideoSubscribers() => OnVideoSourceEncodedSample != null;
public void ExternalVideoSourceRawSample
(
uint durationMilliseconds,
int width,
int height,
byte[] sample,
VideoPixelFormatsEnum pixelFormat
)
{
}
public void ExternalVideoSourceRawSampleFaster
(
uint durationMilliseconds,
RawImage rawImage
)
{
}
public bool IsVideoSourcePaused() => _isPaused;
public void SetFrameRate(int framesPerSecond)
{
if (framesPerSecond < 1 || framesPerSecond > 60)
{
Console.WriteLine(
string.Format(
@"Frames per second not in the allowed range of {0} to {1}, ignoring.",
1,
60
),
Array.Empty<object>()
);
}
else
{
_frameSpacing = 1000 / framesPerSecond;
if (!_isStarted)
return;
_sendTestPatternTimer.Change(
0,
_frameSpacing
);
}
}
public void SetMaxFrameRate(bool isMaxFrameRate)
{
if (_isMaxFrameRate == isMaxFrameRate)
return;
_isMaxFrameRate = isMaxFrameRate;
if (!_isStarted)
return;
if (_isMaxFrameRate)
{
_sendTestPatternTimer.Change(
-1,
-1
);
GenerateMaxFrames();
}
else
_sendTestPatternTimer.Change(
0,
_frameSpacing
);
}
public Task PauseVideo()
{
_isPaused = true;
_sendTestPatternTimer.Change(
-1,
-1
);
return Task.CompletedTask;
}
public Task ResumeVideo()
{
_isPaused = false;
_sendTestPatternTimer.Change(
0,
_frameSpacing
);
return Task.CompletedTask;
}
public Task StartVideo()
{
if (!_isStarted)
{
_isStarted = true;
if (_isMaxFrameRate)
GenerateMaxFrames();
else
_sendTestPatternTimer.Change(
0,
_frameSpacing
);
}
return Task.CompletedTask;
}
public Task CloseVideo()
{
if (_isClosed)
return Task.CompletedTask;
_isClosed = true;
ManualResetEventSlim mre = new ManualResetEventSlim();
_sendTestPatternTimer?.Dispose(mre.WaitHandle);
return Task.Run(() => mre.Wait(1000));
}
private void GenerateMaxFrames()
{
DateTime now = DateTime.Now;
while (!_isClosed && _isMaxFrameRate)
{
_frameSpacing = Convert.ToInt32(DateTime.Now.Subtract(now).TotalMilliseconds);
GenerateTestPattern(null);
now = DateTime.Now;
}
}
private void GenerateTestPattern(object state)
{
lock (_sendTestPatternTimer)
{
if (_isClosed || OnVideoSourceRawSample == null && OnVideoSourceEncodedSample == null)
return;
_frameCount;
StampI420Buffer(
_myI420Buffer,
SCREEN_WIDTH,
SCREEN_HEIGHT,
_frameCount
);
if (OnVideoSourceRawSample != null)
GenerateRawSample(
SCREEN_WIDTH,
SCREEN_HEIGHT,
_myI420Buffer
);
if (_videoEncoder != null && OnVideoSourceEncodedSample != null)
{
VideoFormat selectedFormat = _formatManager.SelectedFormat;
if (!selectedFormat.IsEmpty())
{
IVideoEncoder videoEncoder = _videoEncoder;
UpdateBuffer();
byte[] testI420Buffer = _myI420Buffer;
selectedFormat = _formatManager.SelectedFormat;
int codec = (int)selectedFormat.Codec;
byte[] sample = videoEncoder.EncodeVideo(
SCREEN_WIDTH,
SCREEN_HEIGHT,
testI420Buffer,
VideoPixelFormatsEnum.I420,
(VideoCodecsEnum)codec
);
if (sample != null)
OnVideoSourceEncodedSample(
90000U / (_frameSpacing > 0 ? 1000U / (uint)_frameSpacing : 30U),
sample
);
}
}
if (_frameCount != int.MaxValue)
return;
_frameCount = 0;
}
}
private void GenerateRawSample
(
int width,
int height,
byte[] i420Buffer
)
{
byte[] sample = PixelConverter.I420toBGR(
i420Buffer,
width,
height,
out int _
);
RawVideoSampleDelegate videoSourceRawSample = OnVideoSourceRawSample;
if (videoSourceRawSample == null)
return;
videoSourceRawSample(
(uint)_frameSpacing,
width,
height,
sample,
VideoPixelFormatsEnum.Bgr
);
}
public static void StampI420Buffer
(
byte[] i420Buffer,
int width,
int height,
int frameNumber
)
{
int num1 = width - 20 - 10;
int num2 = height - 20 - 10;
for (int index1 = num2; index1 < num2 20; index1)
{
for (int index2 = num1; index2 < num1 20; index2)
i420Buffer[index1 * width index2] = (byte)(frameNumber % byte.MaxValue);
}
}
public void Dispose()
{
_isClosed = true;
_sendTestPatternTimer?.Dispose();
_videoEncoder?.Dispose();
}
}
}
获取截图BGR的工具类
代码语言:javascript复制using System.IO;
namespace z_remote_control.Utils
{
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Windows.Forms;
public class ZScreenUtils
{
private const PixelFormat FORMAT = PixelFormat.Format24bppRgb;
/// <summary>
/// 获取屏幕的Byte[]
/// </summary>
/// <param name="maxHeight"></param>
/// <returns></returns>
public static byte[] GetScreenshot(int maxHeight = 720)
{
var screen = GetScreen();
Bitmap targetPic;
int width = screen.Width;
int height = screen.Height;
if (screen.Height > maxHeight)
{
double rate = 1.0d * screen.Height / maxHeight;
height = (int)(height / rate);
width = (int)(width / rate);
targetPic = ScalePic(
screen,
width,
height
);
screen.Dispose();
}
else
{
targetPic = screen;
}
var picByte = GetPicByte(
targetPic,
40
);
targetPic.Dispose();
return picByte;
}
public static Bitmap GetScreen()
{
Bitmap screenshot = new Bitmap(
Screen.PrimaryScreen.Bounds.Width,
Screen.PrimaryScreen.Bounds.Height,
FORMAT
);
using (Graphics gfx = Graphics.FromImage(screenshot))
{
gfx.CopyFromScreen(
Screen.PrimaryScreen.Bounds.X,
Screen.PrimaryScreen.Bounds.Y,
0,
0,
Screen.PrimaryScreen.Bounds.Size,
CopyPixelOperation.SourceCopy
);
// 绘制光标图标
// 创建一个红色的画刷
Brush brush = new SolidBrush(Color.LimeGreen);
gfx.FillEllipse(
brush,
Cursor.Position.X - 10,
Cursor.Position.Y - 10,
20,
20
);
return screenshot;
}
}
public static Bitmap ScalePic
(
Bitmap sourrce,
int width,
int height
)
{
Bitmap result = new Bitmap(
width,
height,
FORMAT
);
using (Graphics g = Graphics.FromImage(result))
{
g.InterpolationMode = InterpolationMode.HighQualityBicubic;
g.DrawImage(
sourrce,
0,
0,
width,
height
);
return result;
}
}
public static byte[] GetPicByte(Bitmap sourrce)
{
using (MemoryStream ms = new MemoryStream())
{
sourrce.Save(
ms,
ImageFormat.Jpeg
);
return ms.ToArray();
}
}
public static byte[] GetPicByte
(
Bitmap sourrce,
int quality
)
{
using (MemoryStream ms = new MemoryStream())
{
ImageCodecInfo jpegCodec = GetEncoderInfo("image/jpeg");
var encoderParameters = GetEncoderParameters(quality);
sourrce.Save(
ms,
jpegCodec,
encoderParameters
);
return ms.ToArray();
}
}
public static EncoderParameters GetEncoderParameters(int quality = 90)
{
Encoder qualityEncoder = Encoder.Quality;
EncoderParameters encoderParams = new EncoderParameters(1);
encoderParams.Param[0] = new EncoderParameter(
qualityEncoder,
quality
);
return encoderParams;
}
/// <summary>
/// 获取解码器
/// </summary>
/// <param name="mimeType">
/// </param>
/// <returns>
/// </returns>
private static ImageCodecInfo GetEncoderInfo(string mimeType)
{
int j;
var encoders = ImageCodecInfo.GetImageEncoders();
for (j = 0; j < encoders.Length; j)
{
if (encoders[j].MimeType == mimeType)
{
return encoders[j];
}
}
return null;
}
/// <summary>
/// 获取屏幕并转为Bgr格式
/// </summary>
/// <param name="targetWidth"></param>
/// <param name="targetHeight"></param>
/// <returns></returns>
public static byte[] CaptureScreen
(
int targetWidth,
int targetHeight
)
{
var screen = GetScreen();
var targetPic = ScalePic(
screen,
targetWidth,
targetHeight
);
screen.Dispose();
byte[] bgrByte = Bitmap2Bgr(targetPic);
targetPic.Dispose();
return bgrByte;
}
public static byte[] Bitmap2Bgr(Bitmap bitmap)
{
BitmapData data = bitmap.LockBits(
new Rectangle(
0,
0,
bitmap.Width,
bitmap.Height
),
ImageLockMode.ReadWrite,
PixelFormat.Format24bppRgb
);
bitmap.UnlockBits(data);
int dstBytes = data.Stride * data.Height;
byte[] dstValues = new byte[dstBytes];
System.IntPtr srcPtr = data.Scan0;
Marshal.Copy(
srcPtr,
dstValues,
0,
dstBytes
);
return dstValues;
}
}
}
Web测试页面
代码语言:javascript复制<!DOCTYPE html>
<head>
<script type="text/javascript">
const WEBSOCKET_URL = "ws://127.0.0.1:8081/"
let pc, ws;
async function start() {
pc = new RTCPeerConnection();
pc.ontrack = evt => document.querySelector('#videoCtl').srcObject = evt.streams[0];
pc.onicecandidate = evt => evt.candidate && ws.send(JSON.stringify(evt.candidate));
ws = new WebSocket(document.querySelector('#websockurl').value, []);
ws.onmessage = async function (evt) {
const obj = JSON.parse(evt.data);
if (obj?.candidate) {
await pc.addIceCandidate(obj);
} else if (obj?.sdp) {
await pc.setRemoteDescription(new RTCSessionDescription(obj));
pc.createAnswer()
.then((answer) => pc.setLocalDescription(answer))
.then(() => ws.send(JSON.stringify(pc.localDescription)));
}
};
}
async function closePeer() {
await pc?.close();
await ws?.close();
}
</script>
<title>WebRTC</title>
</head>
<body>
<video controls autoplay="autoplay" id="videoCtl" width="640" height="480"></video>
<div>
<label for="websockurl">WS:</label><input type="text" id="websockurl" size="40"/>
<button type="button" class="btn btn-success" onclick="start();">Start</button>
<button type="button" class="btn btn-success" onclick="closePeer();">Close</button>
</div>
</body>
<script>
document.querySelector('#websockurl').value = WEBSOCKET_URL;
</script>