Recording Button

UI file:
Main window: in OBSBasic.ui, the objectName of the Start Recording button is recordButton.
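The slot below is hooked up by Qt's automatic connection convention: ui->setupUi(this) ends with QMetaObject::connectSlotsByName(), which binds any slot named on_<objectName>_<signal>. A minimal sketch of the mechanism (hypothetical MyWindow class, not OBS code):

// Hypothetical illustration of Qt's auto-connection convention (not OBS code).
#include <QMainWindow>
#include <QPushButton>

class MyWindow : public QMainWindow {
	Q_OBJECT
public:
	MyWindow()
	{
		auto *btn = new QPushButton("Record", this);
		btn->setObjectName("recordButton");
		// Normally called for you at the end of ui->setupUi(this):
		QMetaObject::connectSlotsByName(this);
	}

private slots:
	// Matched purely by name: on_<objectName>_<signal>()
	void on_recordButton_clicked() { /* toggle recording here */ }
};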

Slot function:
void OBSBasic::on_recordButton_clicked()
{
    // 1. If the recording output is already active, optionally confirm before stopping
	if (outputHandler->RecordingActive()) {
		bool confirm = config_get_bool(GetGlobalConfig(), "BasicWindow",
					       "WarnBeforeStoppingRecord");
		if (confirm && isVisible()) {
			QMessageBox::StandardButton button =
				OBSMessageBox::question(
					this, QTStr("ConfirmStopRecord.Title"),
					QTStr("ConfirmStopRecord.Text"),
					QMessageBox::Yes | QMessageBox::No,
					QMessageBox::No);
			if (button == QMessageBox::No) {
				ui->recordButton->setChecked(true);
				return;
			}
		}
		StopRecording();
	} else {
		if (!UIValidation::NoSourcesConfirmation(this)) {
			ui->recordButton->setChecked(false);
			return;
		}
        // start recording
		StartRecording();
	}
}
Call stack:

Starting a recording
void OBSBasic::StartRecording()
{
	if (outputHandler->RecordingActive())
		return;
	if (disableOutputsRef)
		return;
    // 1. Check that the output path is valid
	if (!OutputPathValid()) {
		OutputPathInvalidMessage();
		ui->recordButton->setChecked(false);
		return;
	}
    
    // 2. Check free disk space
	if (LowDiskSpace()) {
		DiskSpaceMessage();
		ui->recordButton->setChecked(false);
		return;
	}
	if (api)
		api->on_event(OBS_FRONTEND_EVENT_RECORDING_STARTING);
	SaveProject();
   
    // 3. Start recording
	if (!outputHandler->StartRecording())
		ui->recordButton->setChecked(false);
}
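LowDiskSpace() boils down to comparing the free space of the recording folder against a threshold. A rough sketch of that check using os_get_free_disk_space() from libobs' util/platform.h (the 50 MB threshold here is an illustrative assumption, not the value OBS uses):

// Sketch only: is there enough free space in the recording folder?
// os_get_free_disk_space() is declared in libobs' util/platform.h;
// the 50 MB threshold below is an assumption for illustration.
#include <util/platform.h>

static bool has_enough_disk_space(const char *rec_dir)
{
	const uint64_t min_free = 50ULL * 1024 * 1024; /* 50 MB */
	uint64_t free_bytes = os_get_free_disk_space(rec_dir);
	return free_bytes > min_free; /* 0 (failure) also fails the check */
}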
// Simple output mode
bool SimpleOutput::StartRecording()
{
	UpdateRecording();
	if (!ConfigureRecording(false))
		return false;
	if (!obs_output_start(fileOutput)) {
		QString error_reason;
		const char *error = obs_output_get_last_error(fileOutput);
		if (error)
			error_reason = QT_UTF8(error);
		else
			error_reason = QTStr("Output.StartFailedGeneric");
		QMessageBox::critical(main,
				      QTStr("Output.StartRecordingFailed"),
				      error_reason);
		return false;
	}
	return true;
}
This calls into libobs:
E:\opensrc\obs_src_19041\obs-studio-19141\obs-studio\libobs\obs-output.c
bool obs_output_start(obs_output_t *output)
{
	bool encoded;
	bool has_service;
	if (!obs_output_valid(output, "obs_output_start"))
		return false;
	if (!output->context.data)
		return false;
	has_service = (output->info.flags & OBS_OUTPUT_SERVICE) != 0;
	if (has_service && !obs_service_initialize(output->service, output))
		return false;
	encoded = (output->info.flags & OBS_OUTPUT_ENCODED) != 0;
	if (encoded && output->delay_sec) {
		return obs_output_delay_start(output);
	} else {
		if (obs_output_actual_start(output)) {
			do_output_signal(output, "starting");
			return true;
		}
		return false;
	}
}
Stopping a recording:
void SimpleOutput::StopRecording(bool force)
{
	if (force)
		obs_output_force_stop(fileOutput);
	else
		obs_output_stop(fileOutput);
}
Both of these call functions exported from libobs.dll, declared in obs.h:
/** Starts the output. */
EXPORT bool obs_output_start(obs_output_t *output);
/** Stops the output. */
EXPORT void obs_output_stop(obs_output_t *output);
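For context, the same exports can be driven directly by any libobs frontend. A hedged sketch (the "ffmpeg_muxer" output id and the "path" setting match what SimpleOutput uses above; encoder wiring is omitted):

// Sketch: starting a file recording through the public libobs API.
// Real code must also attach encoders via obs_output_set_video_encoder()
// and obs_output_set_audio_encoder() before starting.
#include <obs.h>

static obs_output_t *start_file_output(const char *path)
{
	obs_data_t *settings = obs_data_create();
	obs_data_set_string(settings, "path", path);

	obs_output_t *out = obs_output_create("ffmpeg_muxer",
					      "example_file_output",
					      settings, nullptr);
	obs_data_release(settings);

	if (out && !obs_output_start(out)) {
		obs_output_release(out);
		return nullptr;
	}
	return out; /* later: obs_output_stop(out); obs_output_release(out); */
}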
OBS output modes
Advanced output

bool AdvancedOutput::StartRecording()
{
	const char *path;
	const char *recFormat;
	const char *filenameFormat;
	bool noSpace = false;
	bool overwriteIfExists = false;
	if (!useStreamEncoder) {
		if (!ffmpegOutput) {
			UpdateRecordingSettings();
		}
	} else if (!obs_output_active(streamOutput)) {
		UpdateStreamSettings();
	}
	UpdateAudioSettings();
    // 1. Set up the outputs (FFmpeg output or recording output)
	if (!Active())
		SetupOutputs();
    // 2. Build the output path / file name settings
	if (!ffmpegOutput || ffmpegRecording) {
		path = config_get_string(main->Config(), "AdvOut",
					 ffmpegRecording ? "FFFilePath"
							 : "RecFilePath");
		recFormat = config_get_string(main->Config(), "AdvOut",
					      ffmpegRecording ? "FFExtension"
							      : "RecFormat");
		filenameFormat = config_get_string(main->Config(), "Output",
						   "FilenameFormatting");
		overwriteIfExists = config_get_bool(main->Config(), "Output",
						    "OverwriteIfExists");
		noSpace = config_get_bool(main->Config(), "AdvOut",
					  ffmpegRecording
						  ? "FFFileNameWithoutSpace"
						  : "RecFileNameWithoutSpace");
		string strPath = GetRecordingFilename(path, recFormat, noSpace,
						      overwriteIfExists,
						      filenameFormat,
						      ffmpegRecording);
		obs_data_t *settings = obs_data_create();
		obs_data_set_string(settings, ffmpegRecording ? "url" : "path",
				    strPath.c_str());
		obs_output_update(fileOutput, settings);
		obs_data_release(settings);
	}
    // 3. Start the output
	if (!obs_output_start(fileOutput)) {
		QString error_reason;
		const char *error = obs_output_get_last_error(fileOutput);
		if (error)
			error_reason = QT_UTF8(error);
		else
			error_reason = QTStr("Output.StartFailedGeneric");
		QMessageBox::critical(main,
				      QTStr("Output.StartRecordingFailed"),
				      error_reason);
		return false;
	}
	return true;
}
Recording
Why not record with the ffmpeg command line?
- 1 The mouse cursor flickers in the recorded window
- 2 When display scaling is not 100%, region capture comes out incomplete
- 3 The capture source cannot be switched when devices are hot-plugged
- 4 DirectUI windows (which have no window handle) cannot be recorded
FFmpeg
Builds - CODEX FFMPEG @ gyan.dev

Link: https://pan.baidu.com/s/1owwUbNEUzXQMARaOlfRH3w?pwd=yy3j
Extraction code: yy3j
OBS capture modules
- Monitor capture: libobs-d3d11 and libobs-winrt in core, plus duplicator-monitor-capture.c
- Window capture: window-capture.c in win-capture
- Game capture: game-capture.c
- Capture card: decklink
- Camera: win-dshow
Window capture methods

 
BitBlt capture is relatively inefficient and can only grab windows that render through a window handle; handle-less windows have to use WGC. Google Chrome, VSCode and similar windows are in that category, and capturing them with BitBlt just produces a black image.
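For reference, BitBlt capture is essentially a GDI copy out of the window's device context, which is why GPU-composited windows come back black. A bare-bones sketch (error handling omitted, not OBS code):

// Sketch: capture one frame of a window with GDI BitBlt (not OBS code).
#include <windows.h>

static HBITMAP capture_window_bitblt(HWND hwnd)
{
	RECT rc;
	GetClientRect(hwnd, &rc);
	int w = rc.right - rc.left, h = rc.bottom - rc.top;

	HDC src = GetDC(hwnd);             /* window device context */
	HDC mem = CreateCompatibleDC(src); /* memory DC for the copy */
	HBITMAP bmp = CreateCompatibleBitmap(src, w, h);
	HGDIOBJ old = SelectObject(mem, bmp);

	/* GPU-composited windows (Chrome, VSCode, ...) come out black here */
	BitBlt(mem, 0, 0, w, h, src, 0, 0, SRCCOPY);

	SelectObject(mem, old);
	DeleteDC(mem);
	ReleaseDC(hwnd, src);
	return bmp; /* caller frees with DeleteObject() */
}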
E:\opensrc\obs_src_19041\obs-studio-19141\obs-studio\plugins\win-capture\window-capture.c
enum window_capture_method {
	METHOD_AUTO,
	METHOD_BITBLT,
	METHOD_WGC,
};
Which method is used depends on the window. In automatic mode, windows matching the class lists below use WGC; everything else falls back to BitBlt:
static const char *wgc_partial_match_classes[] = {
	"Chrome",
	"Mozilla",
	NULL,
};
static const char *wgc_whole_match_classes[] = {
	"ApplicationFrameWindow",
	"Windows.UI.Core.CoreWindow",
	"XLMAIN",        /* excel*/
	"PPTFrameClass", /* powerpoint */
	"OpusApp",       /* word */
	NULL,
};
static enum window_capture_method
choose_method(enum window_capture_method method, bool wgc_supported,
	      const char *current_class)
{
	if (!wgc_supported)
		return METHOD_BITBLT;
	if (method != METHOD_AUTO)
		return method;
	if (!current_class)
		return METHOD_BITBLT;
	const char **class = wgc_partial_match_classes;
	while (*class) {
		if (astrstri(current_class, *class) != NULL) {
			return METHOD_WGC;
		}
		class ++;
	}
	class = wgc_whole_match_classes;
	while (*class) {
		if (astrcmpi(current_class, *class) == 0) {
			return METHOD_WGC;
		}
		class ++;
	}
	return METHOD_BITBLT;
}
That is how the capture method is chosen.
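A quick example of the matching logic: Chrome's top-level window class is "Chrome_WidgetWin_1", which partially matches "Chrome", so automatic mode picks WGC, while an ordinary Win32 class falls back to BitBlt:

/* Illustrative calls (class names other than Chrome's are just examples). */
choose_method(METHOD_AUTO, true, "Chrome_WidgetWin_1"); /* -> METHOD_WGC            */
choose_method(METHOD_AUTO, true, "Notepad");            /* -> METHOD_BITBLT         */
choose_method(METHOD_AUTO, false, "OpusApp");           /* -> METHOD_BITBLT, no WGC */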

WGC is essentially D3D11-based capture that runs on the GPU.
Windows.Graphics.Capture Namespace - Windows UWP applications | Microsoft Learn
E:\opensrc\obs_src_19041\obs-studio-19141\obs-studio\plugins\win-capture\window-capture.c

Desktop capture

Advances to the display Infrastructure - Windows drivers | Microsoft Learn
Desktop duplication - Windows drivers | Microsoft Learn
Two capture methods:
enum display_capture_method {
	METHOD_AUTO,
	METHOD_DXGI,
	METHOD_WGC,
};
How the method is selected:
bool obs_module_load(void)
{
	struct win_version_info ver;
	bool win8_or_above = false;
	char *config_dir;
     // 1. Determine the Windows version
	struct win_version_info win1903 = {
		.major = 10, .minor = 0, .build = 18362, .revis = 0};
	config_dir = obs_module_config_path(NULL);
	if (config_dir) {
		os_mkdirs(config_dir);
		bfree(config_dir);
	}
	get_win_ver(&ver);
    // 2. Windows 8 or above?
	win8_or_above = ver.major > 6 || (ver.major == 6 && ver.minor >= 2);
    // 3. Is the renderer D3D11?
	obs_enter_graphics();
	graphics_uses_d3d11 = gs_get_device_type() == GS_DEVICE_DIRECT3D_11;
	obs_leave_graphics();
    // 4. WGC requires D3D11 and Windows 10 1903+
	if (graphics_uses_d3d11)
		wgc_supported = win_version_compare(&ver, &win1903) >= 0;
    // 5. Register duplicator_capture_info (DXGI/WGC) or monitor_capture_info (GDI)
	if (win8_or_above && graphics_uses_d3d11)
		obs_register_source(&duplicator_capture_info);
	else
		obs_register_source(&monitor_capture_info);
	obs_register_source(&window_capture_info);
	char *config_path = obs_module_config_path(NULL);
	init_hook_files();
	init_hooks_thread =
		CreateThread(NULL, 0, init_hooks, config_path, 0, NULL);
	obs_register_source(&game_capture_info);
	return true;
}
E:\opensrc\obs_src_19041\obs-studio-19141\obs-studio\plugins\win-capture\duplicator-monitor-capture.c
static void duplicator_capture_tick(void *data, float seconds)
{
	struct duplicator_capture *capture = data;
	/* completely shut down monitor capture if not in use, otherwise it can
	 * sometimes generate system lag when a game is in fullscreen mode */
	if (!obs_source_showing(capture->source)) {
		if (capture->showing) {
			obs_enter_graphics();
			free_capture_data(capture);
			obs_leave_graphics();
			capture->showing = false;
		}
		return;
	}
	/* always try to load the capture immediately when the source is first
	 * shown */
	if (!capture->showing) {
		capture->reset_timeout = RESET_INTERVAL_SEC;
	}
	obs_enter_graphics();
    
    // select the capture method
	if (capture->method == METHOD_WGC) { // WGC
		if (capture->reset_wgc && capture->capture_winrt) {
			capture->exports.winrt_capture_free(
				capture->capture_winrt);
			capture->capture_winrt = NULL;
			capture->reset_wgc = false;
			capture->reset_timeout = RESET_INTERVAL_SEC;
		}
		if (!capture->capture_winrt) {
			capture->reset_timeout += seconds;
			if (capture->reset_timeout >= RESET_INTERVAL_SEC) {
				capture->capture_winrt =
					capture->exports
						.winrt_capture_init_monitor(
							capture->capture_cursor,
							capture->handle);
				capture->reset_timeout = 0.0f;
			}
		}
	} else {
		if (capture->capture_winrt) {
			capture->exports.winrt_capture_free(
				capture->capture_winrt);
			capture->capture_winrt = NULL;
		}
		if (!capture->duplicator) {
			capture->reset_timeout += seconds;
             // desktop duplication
			if (capture->reset_timeout >= RESET_INTERVAL_SEC) {
				capture->duplicator = gs_duplicator_create(
					capture->dxgi_index);
				capture->reset_timeout = 0.0f;
			}
		}
		if (capture->duplicator) {
			if (capture->capture_cursor)
				cursor_capture(&capture->cursor_data);
			if (!gs_duplicator_update_frame(capture->duplicator)) {
				free_capture_data(capture);
			} else if (capture->width == 0) {
				reset_capture_data(capture);
			}
		}
	}
	obs_leave_graphics();
	if (!capture->showing)
		capture->showing = true;
	UNUSED_PARAMETER(seconds);
}
By default this path ends up in desktop duplication.
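gs_duplicator_create() wraps the DXGI Desktop Duplication API. Reduced to a sketch (D3D11 device creation and error handling omitted; not OBS code):

// Sketch: DXGI desktop duplication, the API behind gs_duplicator (not OBS code).
#include <d3d11.h>
#include <dxgi1_2.h>

static IDXGIOutputDuplication *init_duplication(ID3D11Device *device,
						IDXGIOutput1 *output1)
{
	IDXGIOutputDuplication *dupl = nullptr;
	output1->DuplicateOutput(device, &dupl);
	return dupl;
}

static void grab_one_frame(IDXGIOutputDuplication *dupl)
{
	DXGI_OUTDUPL_FRAME_INFO info;
	IDXGIResource *resource = nullptr;

	/* blocks up to 100 ms waiting for a new desktop frame */
	if (SUCCEEDED(dupl->AcquireNextFrame(100, &info, &resource))) {
		/* QueryInterface to ID3D11Texture2D and copy/process it here */
		resource->Release();
		dupl->ReleaseFrame();
	}
}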

 
 
When "Windows 10" is selected as the capture method, WGC is used instead.
BitBlt + x264 capture

Four threads:
- video_thread: video encoding/output thread ("video thread" below); encodes and outputs video
- obs_graphics_thread: video rendering thread ("graphics thread" below); composites the sources, produces the raw video frames, and renders the preview window
- audio_thread: audio encoding/output thread ("audio thread" below)
- CaptureThread: audio capture
When the record button is clicked, raw_active is set to true inside ffmpeg_mux_start; obs_graphics_thread then posts a semaphore, and once video_thread receives it, it encodes and outputs frames (a simplified model of the handshake is sketched below).
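The handshake between the graphics thread and the video thread is a plain semaphore hand-off. A simplified model, assuming libobs' os_sem_* helpers from util/threading.h (the real code lives in media-io/video-io.c):

// Simplified model of the graphics-thread -> video-thread hand-off.
// os_sem_* come from libobs' util/threading.h; the frame queue is elided.
#include <util/threading.h>

static os_sem_t *update_semaphore; /* os_sem_init(&update_semaphore, 0) at startup */
static volatile bool stop_flag;

static void graphics_thread_tick()        /* producer: obs_graphics_thread */
{
	/* ...render and lock a finished frame into the queue... */
	os_sem_post(update_semaphore);    /* "a frame is ready" */
}

static void *video_thread_fn(void *)      /* consumer: video_thread */
{
	while (os_sem_wait(update_semaphore) == 0) {
		if (stop_flag)
			break;
		/* video_output_cur_frame(): scale/convert, then encode + mux */
	}
	return nullptr;
}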

After clicking Start Recording:
How video_thread is created:
ResetVideo
AttemptToResetVideo
-- obs_reset_video
-- obs_init_video
-- video_output_open(&video->video, &vi);
-- pthread_create(&out->thread, NULL, video_thread, ...)   // creates the video thread
-- pthread_create(&video->video_thread, NULL, obs_graphics_thread, obs);   // creates the video rendering thread
In obs_graphics_thread, work is driven by while (obs_graphics_thread_loop(&context)):
When a frame is ready, output_video_data calls video_output_unlock_frame, which posts the semaphore.

This notifies the video thread to start working:

The video thread takes the semaphore and enters its while loop.

video_output_cur_frame processes the current video frame.

scale_video_output handles scaling and the conversion of the captured RGB frame to YUV, etc.
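That RGB-to-YUV step is a per-pixel matrix transform. A minimal BT.709 limited-range sketch for a single pixel (in OBS the conversion actually runs on the GPU or through the video scaler, not per pixel in C like this):

// Sketch: BT.709 limited-range RGB -> YUV for a single 8-bit pixel.
struct YUV { unsigned char y, u, v; };

static YUV rgb_to_yuv709(unsigned char r, unsigned char g, unsigned char b)
{
	double R = r / 255.0, G = g / 255.0, B = b / 255.0;
	double Y = 0.2126 * R + 0.7152 * G + 0.0722 * B;
	double U = (B - Y) / 1.8556; /* Cb */
	double V = (R - Y) / 1.5748; /* Cr */
	YUV out;
	out.y = (unsigned char)(16 + 219.0 * Y + 0.5);
	out.u = (unsigned char)(128 + 224.0 * U + 0.5);
	out.v = (unsigned char)(128 + 224.0 * V + 0.5);
	return out;
}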
input->callback is static void receive_video(void *param, struct video_data *frame),
which performs the video encoding and hands the frame to the concrete encoder, obs_x264_encode:
static bool obs_x264_encode(void *data, struct encoder_frame *frame,
			    struct encoder_packet *packet,
			    bool *received_packet)
{
	struct obs_x264 *obsx264 = data;
	x264_nal_t *nals;
	int nal_count;
	int ret;
	x264_picture_t pic, pic_out;
	if (!frame || !packet || !received_packet)
		return false;
	if (frame)
		init_pic_data(obsx264, &pic, frame);
	ret = x264_encoder_encode(obsx264->context, &nals, &nal_count,
				  (frame ? &pic : NULL), &pic_out);
	if (ret < 0) {
		warn("encode failed");
		return false;
	}
	*received_packet = (nal_count != 0);
	parse_packet(obsx264, packet, nals, nal_count, &pic_out);
	return true;
}
Here x264_encoder_encode is the API from the x264 dependency:
E:\opensrc\obs_src_19041\dependencies2019\win32\include\x264.h
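For orientation, a minimal standalone x264 usage sketch showing where x264_encoder_encode fits (parameters are illustrative, error handling trimmed; not OBS code):

// Sketch: open an x264 encoder, feed raw I420 frames, collect NAL units.
#include <x264.h>

static x264_t *open_encoder(int width, int height, int fps)
{
	x264_param_t param;
	x264_param_default_preset(&param, "veryfast", NULL);
	param.i_width = width;
	param.i_height = height;
	param.i_fps_num = fps;
	param.i_fps_den = 1;
	param.i_csp = X264_CSP_I420;
	x264_param_apply_profile(&param, "high");
	return x264_encoder_open(&param);
}

static int encode_one(x264_t *enc, x264_picture_t *pic)
{
	x264_nal_t *nals;
	int nal_count;
	x264_picture_t pic_out;

	/* each nals[i].p_payload / nals[i].i_payload is an Annex-B NAL unit */
	int size = x264_encoder_encode(enc, &nals, &nal_count, pic, &pic_out);
	return size; /* < 0 on error, 0 if the encoder buffered the frame */
}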
Stopping the recording
Main-thread call stack:
obs-ffmpeg.dll!ffmpeg_mux_stop(void * data, unsigned __int64 ts)
obs.dll!obs_output_actual_stop(obs_output * output, bool force, unsigned __int64 ts)
obs.dll!obs_output_stop(obs_output * output)
obs32.exe!SimpleOutput::StopRecording(bool force)
obs32.exe!OBSBasic::StopRecording()
On a separate thread (shown under ucrtbased.dll in the debugger), obs_x264_destroy runs to destroy the x264 encoder.

Audio device initialization and creation of the audio capture thread

E:\opensrc\obs_src_19041\obs-studio-19141\obs-studio\plugins\win-wasapi\win-wasapi.cpp
static void *CreateWASAPISource(obs_data_t *settings, obs_source_t *source,
				bool input)
{
	try {
		return new WASAPISource(settings, source, input);
	} catch (const char *error) {
		blog(LOG_ERROR, "[CreateWASAPISource] %s", error);
	}
	return nullptr;
}
COM initialization:
WASAPISource::WASAPISource(obs_data_t *settings, obs_source_t *source_,
			   bool input)
	: source(source_), isInputDevice(input)
{
	UpdateSettings(settings);
	stopSignal = CreateEvent(nullptr, true, false, nullptr);
	if (!stopSignal.Valid())
		throw "Could not create stop signal";
	receiveSignal = CreateEvent(nullptr, false, false, nullptr);
	if (!receiveSignal.Valid())
		throw "Could not create receive signal";
	Start();
}
inline void WASAPISource::Start()
{
	if (!TryInitialize()) {
		blog(LOG_INFO,
		     "[WASAPISource::WASAPISource] "
		     "Device '%s' not found.  Waiting for device",
		     device_id.c_str());
		Reconnect();
	}
}
......
void WASAPISource::Initialize()
{
	HRESULT res;
    // create the MMDeviceEnumerator COM object
	res = CoCreateInstance(__uuidof(MMDeviceEnumerator), nullptr,
			       CLSCTX_ALL, __uuidof(IMMDeviceEnumerator),
			       (void **)enumerator.Assign());
	if (FAILED(res))
		throw HRError("Failed to create enumerator", res);
	if (!InitDevice())
		return;
	device_name = GetDeviceName(device);
	if (!notify) {
		notify = new WASAPINotify(this);
		enumerator->RegisterEndpointNotificationCallback(notify);
	}
	HRESULT resSample;
	IPropertyStore *store = nullptr;
	PWAVEFORMATEX deviceFormatProperties;
	PROPVARIANT prop;
	resSample = device->OpenPropertyStore(STGM_READ, &store);
	if (!FAILED(resSample)) {
		resSample =
			store->GetValue(PKEY_AudioEngine_DeviceFormat, &prop);
		if (!FAILED(resSample)) {
			if (prop.vt != VT_EMPTY && prop.blob.pBlobData) {
				deviceFormatProperties =
					(PWAVEFORMATEX)prop.blob.pBlobData;
				device_sample = std::to_string(
					deviceFormatProperties->nSamplesPerSec);
			}
		}
		store->Release();
	}
	InitClient();
	if (!isInputDevice)
		InitRender();
	InitCapture();
}
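InitClient() and InitCapture() are not shown above; they activate an IAudioClient on the device, enable event-driven capture, and hand OBS' receiveSignal event to WASAPI. A condensed sketch of that sequence under those assumptions (shared mode, no error handling; for loopback capture of an output device, OBS additionally opens a silent render stream in InitRender() so the event actually fires):

// Sketch of the IAudioClient / IAudioCaptureClient setup behind
// InitClient()/InitCapture() (simplified; not the exact OBS code).
#include <audioclient.h>
#include <mmdeviceapi.h>

static IAudioCaptureClient *init_capture(IMMDevice *device, HANDLE receiveSignal,
					 bool isInputDevice)
{
	IAudioClient *client = nullptr;
	WAVEFORMATEX *wfex = nullptr;
	IAudioCaptureClient *capture = nullptr;

	device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr,
			 (void **)&client);
	client->GetMixFormat(&wfex);

	DWORD flags = AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
	if (!isInputDevice)
		flags |= AUDCLNT_STREAMFLAGS_LOOPBACK; /* capture what is playing */

	client->Initialize(AUDCLNT_SHAREMODE_SHARED, flags,
			   10000000 /* 1 s buffer in 100 ns units */, 0, wfex,
			   nullptr);
	client->GetService(__uuidof(IAudioCaptureClient), (void **)&capture);
	client->SetEventHandle(receiveSignal); /* signaled when data is ready */
	client->Start();
	return capture;
}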
Creating the audio capture thread, CaptureThread:
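The thread procedure itself is essentially a WaitForMultipleObjects loop over receiveSignal and stopSignal that drains packets via ProcessCaptureData(). A sketch of its shape (member access simplified; in the real class these are wrapped handles and the procedure is a static member):

// Sketch of the CaptureThread loop: wait until WASAPI signals new data
// (receiveSignal) or shutdown is requested (stopSignal).
static DWORD WINAPI CaptureThreadSketch(LPVOID param)
{
	WASAPISource *source = (WASAPISource *)param;
	HANDLE sigs[2] = {source->receiveSignal, source->stopSignal};

	for (;;) {
		DWORD ret = WaitForMultipleObjects(2, sigs, FALSE, INFINITE);
		if (ret != WAIT_OBJECT_0)          /* stopSignal or error */
			break;
		if (!source->ProcessCaptureData()) /* drain pending packets */
			break;                     /* device lost -> reconnect */
	}
	return 0;
}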

 
bool WASAPISource::ProcessCaptureData()
{
	HRESULT res;
	LPBYTE buffer;
	UINT32 frames;
	DWORD flags;
	UINT64 pos, ts;
	UINT captureSize = 0;
	while (true) {
         // get the size of the next captured packet
		res = capture->GetNextPacketSize(&captureSize);
		if (FAILED(res)) {
			if (res != AUDCLNT_E_DEVICE_INVALIDATED)
				blog(LOG_WARNING,
				     "[WASAPISource::GetCaptureData]"
				     " capture->GetNextPacketSize"
				     " failed: %lX",
				     res);
			return false;
		}
		if (!captureSize)
			break;
        // fetch the audio data
		res = capture->GetBuffer(&buffer, &frames, &flags, &pos, &ts);
		if (FAILED(res)) {
			if (res != AUDCLNT_E_DEVICE_INVALIDATED)
				blog(LOG_WARNING,
				     "[WASAPISource::GetCaptureData]"
				     " capture->GetBuffer"
				     " failed: %lX",
				     res);
			return false;
		}
		obs_source_audio data = {};
		data.data[0] = (const uint8_t *)buffer;
		data.frames = (uint32_t)frames;
		data.speakers = speakers;
		data.samples_per_sec = sampleRate;
		data.format = format;
		data.timestamp = useDeviceTiming ? ts * 100 : os_gettime_ns();
		if (!useDeviceTiming)
			data.timestamp -= util_mul_div64(frames, 1000000000ULL,
							 sampleRate);
       // hand the audio data to libobs
		obs_source_output_audio(source, &data);
		capture->ReleaseBuffer(frames);
	}
	return true;
}
audio_thread

 
 
 

static void input_and_output(struct audio_output *audio, uint64_t audio_time,
			     uint64_t prev_time)
{
	size_t bytes = AUDIO_OUTPUT_FRAMES * audio->block_size;
	struct audio_output_data data[MAX_AUDIO_MIXES];
	uint32_t active_mixes = 0;
	uint64_t new_ts = 0;
	bool success;
	memset(data, 0, sizeof(data));
#ifdef DEBUG_AUDIO
	blog(LOG_DEBUG, "audio_time: %llu, prev_time: %llu, bytes: %lu",
	     audio_time, prev_time, bytes);
#endif
	/* get mixers */
	pthread_mutex_lock(&audio->input_mutex);
	for (size_t i = 0; i < MAX_AUDIO_MIXES; i++) {
		if (audio->mixes[i].inputs.num)
			active_mixes |= (1 << i);
	}
	pthread_mutex_unlock(&audio->input_mutex);
	/* clear mix buffers */
	for (size_t mix_idx = 0; mix_idx < MAX_AUDIO_MIXES; mix_idx++) {
		struct audio_mix *mix = &audio->mixes[mix_idx];
		memset(mix->buffer, 0, sizeof(mix->buffer));
		for (size_t i = 0; i < audio->planes; i++)
			data[mix_idx].data[i] = mix->buffer[i];
	}
	/* get new audio data */
	success = audio->input_cb(audio->input_param, prev_time, audio_time,
				  &new_ts, active_mixes, data);
	if (!success)
		return;
	/* clamps audio data to -1.0..1.0 */
	clamp_audio_output(audio, bytes);
	/* output */
	for (size_t i = 0; i < MAX_AUDIO_MIXES; i++)
		do_audio_output(audio, i, new_ts, AUDIO_OUTPUT_FRAMES);
}
This eventually calls do_audio_output.

static const char *receive_audio_name = "receive_audio";
static void receive_audio(void *param, size_t mix_idx, struct audio_data *in)
{
	profile_start(receive_audio_name);
	struct obs_encoder *encoder = param;
	struct audio_data audio = *in;
	if (!encoder->first_received) {
		encoder->first_raw_ts = audio.timestamp;
		encoder->first_received = true;
		clear_audio(encoder);
	}
	if (audio_pause_check(&encoder->pause, &audio, encoder->samplerate))
		goto end;
	if (!buffer_audio(encoder, &audio))
		goto end;
	while (encoder->audio_input_buffer[0].size >=
	       encoder->framesize_bytes) {
		if (!send_audio_data(encoder)) {
			break;
		}
	}
	UNUSED_PARAMETER(mix_idx);
end:
	profile_end(receive_audio_name);
}
The actual encoding happens in send_audio_data.
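For a sense of scale, the while loop above slices the buffered audio into encoder frames of framesize_bytes. Assuming libobs' default planar float format and AAC's 1024-sample frames, that works out as follows (illustrative arithmetic, not OBS code):

// Back-of-envelope: how much buffered audio makes one AAC frame per plane,
// assuming planar float samples (4 bytes per sample per channel plane).
const int frames_per_packet = 1024;         /* AAC frame size in samples */
const int bytes_per_sample = sizeof(float); /* 4 bytes, one plane        */
const int framesize_bytes = frames_per_packet * bytes_per_sample; /* 4096 */
/* receive_audio() keeps calling send_audio_data() while
 * audio_input_buffer[0].size >= framesize_bytes, i.e. one packet
 * per 1024-sample slice of each plane. */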