我正在尝试创建一个可以录制屏幕的应用程序,同时可以录制输入和输出音频。我在网上检查了很多东西但是找不到任何可以一起做的东西。
我尝试用sharpavi和naudio做了一些尝试。底部的代码可以把屏幕录像和来自麦克风的语音一起录制,也可以录制来自扬声器的声音。问题是:屏幕视频和麦克风音频被写入一个视频文件,而来自扬声器(speakers)的声音被写入另一个mp3文件。(我的老板想要的是单个文件,而我目前做不到:)
所以我想创建一个同时包含屏幕(screen)录制、输入语音和输出语音的单个视频文件。希望你能帮助我。
// Captured screen dimensions in physical (device) pixels.
private readonly int screenWidth;
private readonly int screenHeight;
// SharpAvi writer producing the output AVI container.
private readonly AviWriter writer;
private readonly IAviVideoStream videoStream;
private readonly IAviAudioStream audioStream;
// Microphone input captured via NAudio's event-driven WaveIn.
private readonly WaveInEvent audioSource;
// Background thread that grabs screenshots and feeds the video stream.
private readonly Thread screenThread;
// Set once to tell the screen thread (and waiting audio callbacks) to stop.
private readonly ManualResetEvent stopThread = new ManualResetEvent(false);
// Handshake pair that interleaves video frames with microphone audio blocks.
private readonly AutoResetEvent videoFrameWritten = new AutoResetEvent(false);
private readonly AutoResetEvent audioBlockWritten = new AutoResetEvent(false);
// WASAPI loopback capture of the speaker (output) audio. Its data is written
// to a separate WAV file (writerx), NOT muxed into the AVI — which is the
// asker's core problem.
WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
WaveFileWriter writerx;
// Builds the recorder: measures the primary screen in device pixels,
// opens the AVI writer and its video/audio streams, sets up microphone
// capture (WaveInEvent) plus a separate WASAPI loopback capture whose
// speaker audio goes to a standalone WAV file, then starts the
// screen-grabbing thread.
public Recorder(string fileName,
FourCC codec, int quality,
int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
{
// Convert WPF device-independent units to physical pixels (DPI-aware sizing).
System.Windows.Media.Matrix toDevice;
using (var source = new HwndSource(new HwndSourceParameters()))
{
toDevice = source.CompositionTarget.TransformToDevice;
}
screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);
// Create AVI writer and specify FPS
writer = new AviWriter(fileName)
{
FramesPerSecond = 10,
EmitIndex1 = true,
};
// Create video stream
videoStream = CreateVideoStream(codec, quality);
videoStream.Name = "Screencast";
// Speaker (loopback) audio is written to its own WAV file on the desktop;
// it is NOT muxed into the AVI created above.
var outputFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "NAudio");
Directory.CreateDirectory(outputFolder);
var outputFilePath = Path.Combine(outputFolder, "recordedx.wav");
writerx = new WaveFileWriter(outputFilePath, capture.WaveFormat);
if (audioSourceIndex >= 0)
{
// NOTE(review): a freshly constructed WasapiLoopbackCapture starts in the
// Stopped state, so this loop normally exits immediately — confirm intent.
while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
{
Thread.Sleep(500);
}
var waveFormat = ToWaveFormat(audioWaveFormat);
audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
audioStream.Name = "Voice";
// Microphone capture; buffer length roughly matches one video frame period.
audioSource = new WaveInEvent
{
DeviceNumber = audioSourceIndex,
WaveFormat = waveFormat,
BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
NumberOfBuffers = 3,
};
audioSource.DataAvailable += audioSource_DataAvailable;
capture.DataAvailable += Capture_DataAvailable; // loopback -> side WAV file
}
screenThread = new Thread(RecordScreen)
{
Name = typeof(Recorder).Name + ".RecordScreen",
IsBackground = true
};
if (audioSource != null)
{
// Prime the frame/audio handshake, then start both captures.
videoFrameWritten.Set();
audioBlockWritten.Reset();
audioSource.StartRecording();
capture.StartRecording(); // speaker loopback
}
screenThread.Start();
}
/// <summary>
/// Loopback (speaker) audio callback: appends the captured bytes to the
/// side-channel WAV file.
/// </summary>
private void Capture_DataAvailable(object sender, WaveInEventArgs e)
{
    // FIX: Dispose() nulls out writerx while the loopback capture may still
    // deliver a late buffer; the null-conditional call avoids a
    // NullReferenceException / write-after-dispose in that window.
    writerx?.Write(e.Buffer, 0, e.BytesRecorded);
}
/// <summary>
/// Adds a video stream to the AVI writer whose encoder matches the
/// requested codec: raw RGB, Motion-JPEG, or the generic MPEG-4 wrapper.
/// </summary>
private IAviVideoStream CreateVideoStream(FourCC codec, int quality)
{
    // Raw frames need no encoder at all.
    if (codec == KnownFourCCs.Codecs.Uncompressed)
    {
        return writer.AddUncompressedVideoStream(screenWidth, screenHeight);
    }

    // Motion-JPEG has a dedicated stream type with a quality knob.
    if (codec == KnownFourCCs.Codecs.MotionJpeg)
    {
        return writer.AddMotionJpegVideoStream(screenWidth, screenHeight, quality);
    }

    // Every other FourCC goes through the VfW MPEG-4 encoder wrapper.
    return writer.AddMpeg4VideoStream(
        screenWidth,
        screenHeight,
        (double)writer.FramesPerSecond,
        quality: quality,
        codec: codec,
        forceSingleThreadedAccess: true);
}
/// <summary>
/// Adds the microphone audio stream to the AVI writer: MP3-encoded at the
/// given bit rate when <paramref name="encode"/> is true, raw PCM otherwise.
/// </summary>
private IAviAudioStream CreateAudioStream(WaveFormat waveFormat, bool encode, int bitRate)
{
    if (encode)
    {
        return writer.AddMp3AudioStream(waveFormat.Channels, waveFormat.SampleRate, bitRate);
    }

    return writer.AddAudioStream(
        channelCount: waveFormat.Channels,
        samplesPerSecond: waveFormat.SampleRate,
        bitsPerSample: waveFormat.BitsPerSample);
}
/// <summary>
/// Maps the UI-selected <see cref="SupportedWaveFormat"/> value to a concrete
/// NAudio <see cref="WaveFormat"/>. Only 16-bit 44.1 kHz mono/stereo are handled.
/// </summary>
private static WaveFormat ToWaveFormat(SupportedWaveFormat waveFormat)
{
    if (waveFormat == SupportedWaveFormat.WAVE_FORMAT_44M16)
    {
        return new WaveFormat(44100, 16, 1); // 44.1 kHz, 16-bit, mono
    }

    if (waveFormat == SupportedWaveFormat.WAVE_FORMAT_44S16)
    {
        return new WaveFormat(44100, 16, 2); // 44.1 kHz, 16-bit, stereo
    }

    throw new NotSupportedException("Wave formats other than '16-bit 44.1kHz' are not currently supported.");
}
/// <summary>
/// Stops the screen thread and both audio captures, then releases the
/// writers and synchronization handles.
/// </summary>
public void Dispose()
{
    // Signal the screen-grabbing thread and wait for it to finish.
    stopThread.Set();
    screenThread.Join();

    // FIX: stop and detach BOTH audio sources BEFORE disposing the writers
    // they feed. The original disposed writerx while the loopback capture was
    // still recording (it was never StopRecording()-ed at all), so a late
    // DataAvailable callback could write into a disposed WaveFileWriter.
    capture.StopRecording();
    capture.DataAvailable -= Capture_DataAvailable;
    if (audioSource != null)
    {
        audioSource.StopRecording();
        audioSource.DataAvailable -= audioSource_DataAvailable;
    }

    writerx.Dispose();
    writerx = null;
    capture.Dispose();

    writer.Close();
    stopThread.Close();
}
/// <summary>
/// Screen-grabbing loop (runs on screenThread). Captures one screenshot per
/// frame period, interleaves each frame with one microphone audio block via
/// the videoFrameWritten/audioBlockWritten handshake, and paces itself
/// against the writer's frame rate.
/// </summary>
private void RecordScreen()
{
    var stopwatch = new Stopwatch();
    // One 32bpp frame: width * height * 4 bytes.
    var buffer = new byte[screenWidth * screenHeight * 4];
    Task videoWriteTask = null;
    var isFirstFrame = true;
    var shotsTaken = 0;
    var timeTillNextFrame = TimeSpan.Zero;
    stopwatch.Start();
    while (!stopThread.WaitOne(timeTillNextFrame))
    {
        GetScreenshot(buffer);
        shotsTaken++;
        // Wait for the previous frame to be written, then let the audio
        // callback submit its matching block.
        if (!isFirstFrame)
        {
            videoWriteTask.Wait();
            videoFrameWritten.Set();
        }
        if (audioStream != null)
        {
            // Block until an audio block is written (or stop is requested).
            var signalled = WaitHandle.WaitAny(new WaitHandle[] { audioBlockWritten, stopThread });
            if (signalled == 1)
                break;
        }
        // FIX: the original statement was truncated — the length argument and
        // the closing parenthesis were missing, so it did not compile.
        // Write the whole 32bpp frame asynchronously (key frame = true).
        videoWriteTask = videoStream.WriteFrameAsync(true, buffer, 0, buffer.Length);
        // Sleep just long enough to hold the configured frame rate; never negative.
        timeTillNextFrame = TimeSpan.FromSeconds(shotsTaken / (double)writer.FramesPerSecond - stopwatch.Elapsed.TotalSeconds);
        if (timeTillNextFrame < TimeSpan.Zero)
            timeTillNextFrame = TimeSpan.Zero;
        isFirstFrame = false;
    }
    stopwatch.Stop();
    // Wait for the last queued frame to finish writing.
    if (!isFirstFrame)
    {
        videoWriteTask.Wait();
    }
}
/// <summary>
/// Captures the primary screen into <paramref name="buffer"/> as 32bpp RGB
/// pixel data (buffer must be screenWidth * screenHeight * 4 bytes).
/// </summary>
private void GetScreenshot(byte[] buffer)
{
    using (var bmp = new Bitmap(screenWidth, screenHeight))
    using (var gfx = Graphics.FromImage(bmp))
    {
        // Blit the whole primary screen into the bitmap.
        gfx.CopyFromScreen(0, 0, 0, 0, new System.Drawing.Size(screenWidth, screenHeight));

        // Copy the raw pixel bytes out of the locked bitmap.
        var bounds = new Rectangle(0, 0, screenWidth, screenHeight);
        var data = bmp.LockBits(bounds, ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
        Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
        bmp.UnlockBits(data);
    }
}
/// <summary>
/// Microphone audio callback: waits until the screen thread has written the
/// matching video frame, then appends this block to the AVI audio stream and
/// signals the screen thread to continue. Skips the write if stop was signalled.
/// </summary>
private void audioSource_DataAvailable(object sender, WaveInEventArgs e)
{
    var handles = new WaitHandle[] { videoFrameWritten, stopThread };
    var signalled = WaitHandle.WaitAny(handles);
    if (signalled == 0)
    {
        // A frame was written — interleave this audio block right after it.
        audioStream.WriteBlock(e.Buffer, 0, e.BytesRecorded);
        audioBlockWritten.Set();
    }
}