adaptive fps moved to pipeline creation.

Miroslav Šedivý 2023-01-29 17:40:07 +01:00
parent c45a315d9b
commit 4094639ea9
7 changed files with 40 additions and 65 deletions

View File

@@ -22,7 +22,7 @@ type CaptureManagerCtx struct {
func New(desktop types.DesktopManager, config *config.Capture) *CaptureManagerCtx {
logger := log.With().Str("module", "capture").Logger()
manager := &CaptureManagerCtx{
return &CaptureManagerCtx{
logger: logger,
desktop: desktop,
@@ -34,13 +34,15 @@ func New(desktop types.DesktopManager, config *config.Capture) *CaptureManagerCt
return NewAudioPipeline(config.AudioCodec, config.AudioDevice, config.AudioPipeline, config.AudioBitrate)
}, "audio"),
video: streamSinkNew(config.VideoCodec, func() (string, error) {
return NewVideoPipeline(config.VideoCodec, config.Display, config.VideoPipeline, config.VideoMaxFPS, config.VideoBitrate, config.VideoHWEnc)
// use screen fps as default
fps := desktop.GetScreenSize().Rate
// if max fps is set, cap it to that value
if config.VideoMaxFPS > 0 && config.VideoMaxFPS < fps {
fps = config.VideoMaxFPS
}
return NewVideoPipeline(config.VideoCodec, config.Display, config.VideoPipeline, fps, config.VideoBitrate, config.VideoHWEnc)
}, "video"),
}
manager.Video().SetAdaptiveFramerate(config.VideoAdaptiveFramerate)
return manager
}
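With this change the target framerate is resolved lazily, inside the video pipeline factory: it defaults to the current screen refresh rate and is only capped when VideoMaxFPS is set. A minimal sketch of that decision, with the hypothetical helper resolveFPS standing in for the inlined closure logic:

package main

import "fmt"

// resolveFPS is a hypothetical helper mirroring the inlined closure logic:
// default to the current screen refresh rate, cap it only when a positive
// maximum is configured.
func resolveFPS(screenRate, maxFPS int16) int16 {
	fps := screenRate // use screen fps as default
	if maxFPS > 0 && maxFPS < fps {
		fps = maxFPS // if max fps is set, cap it to that value
	}
	return fps
}

func main() {
	fmt.Println(resolveFPS(60, 0))  // no cap configured -> 60
	fmt.Println(resolveFPS(60, 30)) // capped -> 30
	fmt.Println(resolveFPS(24, 30)) // screen slower than the cap -> 24
}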
func (manager *CaptureManagerCtx) Start() {
@@ -54,7 +56,7 @@ func (manager *CaptureManagerCtx) Start() {
for {
_, ok := <-manager.desktop.GetBeforeScreenSizeChangeChannel()
if !ok {
manager.logger.Info().Msg("Before screen size change channel was closed")
manager.logger.Info().Msg("before screen size change channel was closed")
return
}
@@ -70,14 +72,13 @@ func (manager *CaptureManagerCtx) Start() {
go func() {
for {
framerate, ok := <-manager.desktop.GetAfterScreenSizeChangeChannel()
_, ok := <-manager.desktop.GetAfterScreenSizeChangeChannel()
if !ok {
manager.logger.Info().Msg("After screen size change channel was closed")
manager.logger.Info().Msg("after screen size change channel was closed")
return
}
if manager.video.Started() {
manager.video.SetChangeFramerate(framerate)
err := manager.video.createPipeline()
if err != nil && !errors.Is(err, types.ErrCapturePipelineAlreadyExists) {
manager.logger.Panic().Err(err).Msg("unable to recreate video pipeline")
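The screen-size-change handler no longer pushes a framerate into the sink; it only recreates the pipeline, and the factory closure above re-reads the screen rate on its own. A sketch of that pattern under assumed names (rebuildOnResize, errPipelineAlreadyExists):

package main

import (
	"errors"
	"fmt"
)

// errPipelineAlreadyExists stands in for types.ErrCapturePipelineAlreadyExists.
var errPipelineAlreadyExists = errors.New("pipeline already exists")

// rebuildOnResize is a hypothetical stand-in for the goroutine above: on every
// screen-size-change event it simply recreates the pipeline; the pipeline
// factory re-reads the current screen rate, so no framerate travels through
// the channel anymore.
func rebuildOnResize(resized <-chan struct{}, started func() bool, createPipeline func() error) {
	for range resized {
		if !started() {
			continue
		}
		if err := createPipeline(); err != nil && !errors.Is(err, errPipelineAlreadyExists) {
			fmt.Println("unable to recreate video pipeline:", err)
			return
		}
	}
}

func main() {
	resized := make(chan struct{}, 1)
	resized <- struct{}{}
	close(resized)

	rebuildOnResize(resized,
		func() bool { return true },
		func() error { fmt.Println("video pipeline recreated"); return nil },
	)
}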

View File

@@ -2,8 +2,6 @@ package capture
import (
"errors"
"regexp"
"strconv"
"sync"
"github.com/rs/zerolog"
@@ -18,16 +16,13 @@ type StreamSinkManagerCtx struct {
logger zerolog.Logger
mu sync.Mutex
codec codec.RTPCodec
pipeline *gst.Pipeline
pipelineMu sync.Mutex
pipelineFn func() (string, error)
adaptiveFramerate bool
codec codec.RTPCodec
pipeline *gst.Pipeline
pipelineMu sync.Mutex
pipelineFn func() (string, error)
listeners int
listenersMu sync.Mutex
changeFramerate int16
}
func streamSinkNew(codec codec.RTPCodec, pipelineFn func() (string, error), video_id string) *StreamSinkManagerCtx {
@@ -37,11 +32,9 @@ func streamSinkNew(codec codec.RTPCodec, pipelineFn func() (string, error), vide
Str("video_id", video_id).Logger()
manager := &StreamSinkManagerCtx{
logger: logger,
codec: codec,
pipelineFn: pipelineFn,
changeFramerate: 0,
adaptiveFramerate: false,
logger: logger,
codec: codec,
pipelineFn: pipelineFn,
}
return manager
@@ -141,11 +134,6 @@ func (manager *StreamSinkManagerCtx) createPipeline() error {
return err
}
if manager.changeFramerate > 0 && manager.adaptiveFramerate {
m1 := regexp.MustCompile(`framerate=\d+/1`)
pipelineStr = m1.ReplaceAllString(pipelineStr, "framerate="+strconv.FormatInt(int64(manager.changeFramerate), 10)+"/1")
}
manager.logger.Info().
Str("codec", manager.codec.Name).
Str("src", pipelineStr).
@@ -187,11 +175,3 @@ func (manager *StreamSinkManagerCtx) GetSampleChannel() chan types.Sample {
return nil
}
func (manager *StreamSinkManagerCtx) SetChangeFramerate(rate int16) {
manager.changeFramerate = rate
}
func (manager *StreamSinkManagerCtx) SetAdaptiveFramerate(allow bool) {
manager.adaptiveFramerate = allow
}
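With SetChangeFramerate and SetAdaptiveFramerate gone, the sink no longer patches the pipeline description with a regexp; the framerate has to be baked into the string returned by pipelineFn. An illustrative pipelineFn sketch (the GStreamer element chain below is an assumption, not the project's actual pipeline):

package main

import "fmt"

// buildVideoPipeline is a hypothetical pipelineFn: the framerate is baked into
// the pipeline description when it is built, so createPipeline can use the
// returned string as-is. The element chain is only illustrative.
func buildVideoPipeline(display string, fps int16, bitrateKbps uint) (string, error) {
	return fmt.Sprintf(
		"ximagesrc display-name=%s show-pointer=true use-damage=false "+
			"! video/x-raw,framerate=%d/1 "+
			"! videoconvert ! queue "+
			"! vp8enc target-bitrate=%d",
		display, fps, bitrateKbps*1000), nil
}

func main() {
	pipelineStr, err := buildVideoPipeline(":0", 30, 3072)
	if err != nil {
		panic(err)
	}
	fmt.Println(pipelineStr)
}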

View File

@@ -11,13 +11,12 @@ import (
type Capture struct {
// video
Display string
VideoCodec codec.RTPCodec
VideoHWEnc string // TODO: Pipeline builder.
VideoBitrate uint // TODO: Pipeline builder.
VideoMaxFPS int16 // TODO: Pipeline builder.
VideoPipeline string
VideoAdaptiveFramerate bool
Display string
VideoCodec codec.RTPCodec
VideoHWEnc string // TODO: Pipeline builder.
VideoBitrate uint // TODO: Pipeline builder.
VideoMaxFPS int16 // TODO: Pipeline builder.
VideoPipeline string
// audio
AudioDevice string
@@ -192,13 +191,7 @@ func (s *Capture) Set() {
s.VideoHWEnc = videoHWEnc
s.VideoBitrate = viper.GetUint("video_bitrate")
s.VideoAdaptiveFramerate = false
s.VideoMaxFPS = int16(viper.GetInt("max_fps"))
if s.VideoMaxFPS == 0 {
// TODO: Get the starting fps from the screen parameter.
s.VideoMaxFPS = 30
s.VideoAdaptiveFramerate = true
}
s.VideoPipeline = viper.GetString("video")
//
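After this change max_fps is passed through unmodified: a positive value caps the encoder framerate, while 0 (unset) no longer turns on a separate adaptive flag and simply leaves the screen refresh rate in charge. A small sketch of reading the value, assuming the same viper key:

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// 0 is the "unset" default: no cap, follow the screen refresh rate.
	viper.SetDefault("max_fps", 0)

	videoMaxFPS := int16(viper.GetInt("max_fps"))
	if videoMaxFPS > 0 {
		fmt.Printf("video framerate capped at %d fps\n", videoMaxFPS)
	} else {
		fmt.Println("no cap configured, video follows the screen refresh rate")
	}
}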

View File

@@ -52,9 +52,10 @@ func (manager *DesktopManagerCtx) Start() {
for {
msg, ok := <-xevent.EventErrorChannel
if !ok {
manager.logger.Info().Msg("Error channel was closed")
manager.logger.Info().Msg("xevent error channel was closed")
return
}
manager.logger.Warn().
Uint8("error_code", msg.Error_code).
Str("message", msg.Message).

View File

@@ -26,8 +26,6 @@ type StreamSinkManager interface {
ListenersCount() int
Started() bool
GetSampleChannel() chan Sample
SetChangeFramerate(rate int16)
SetAdaptiveFramerate(allow bool)
}
type CaptureManager interface {

View File

@@ -63,12 +63,13 @@ func (manager *WebRTCManager) Start() {
continue
}
newSample, ok := <-manager.capture.Audio().GetSampleChannel()
sample, ok := <-manager.capture.Audio().GetSampleChannel()
if !ok {
manager.logger.Info().Msg("Audio capture channel was closed")
return
manager.logger.Info().Msg("audio capture channel was closed")
continue // TODO: Create this goroutine when creating the pipeline.
}
err := manager.audioTrack.WriteSample(media.Sample(newSample))
err := manager.audioTrack.WriteSample(media.Sample(sample))
if err != nil && errors.Is(err, io.ErrClosedPipe) {
manager.logger.Warn().Err(err).Msg("audio pipeline failed to write")
}
@@ -93,12 +94,13 @@ func (manager *WebRTCManager) Start() {
continue
}
newSample, ok := <-manager.capture.Video().GetSampleChannel()
sample, ok := <-manager.capture.Video().GetSampleChannel()
if !ok {
manager.logger.Info().Msg("Video capture channel was closed")
return
manager.logger.Info().Msg("video capture channel was closed")
continue // TODO: Create this goroutine when creating the pipeline.
}
err := manager.videoTrack.WriteSample(media.Sample(newSample))
err := manager.videoTrack.WriteSample(media.Sample(sample))
if err != nil && errors.Is(err, io.ErrClosedPipe) {
manager.logger.Warn().Err(err).Msg("video pipeline failed to write")
}
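The audio and video sample loops now continue instead of returning when their capture channel closes (see the TODO above), so a recreated pipeline's fresh channel is picked up on the next iteration. A hypothetical sketch of that pattern (pumpSamples and its callbacks are assumed names):

package main

import (
	"fmt"
	"time"
)

// pumpSamples is a hypothetical sketch of the loops above: the sample channel
// is re-fetched every iteration, so when a pipeline is recreated and its old
// channel closes, the goroutine keeps running and picks up the new channel
// instead of returning.
func pumpSamples(done <-chan struct{}, getChannel func() chan string, write func(string) error) {
	for {
		select {
		case <-done:
			return
		case sample, ok := <-getChannel():
			if !ok {
				continue // channel closed, e.g. pipeline recreated; try again
			}
			if err := write(sample); err != nil {
				fmt.Println("failed to write sample:", err)
			}
		}
	}
}

func main() {
	samples := make(chan string, 1)
	samples <- "frame-1"

	done := make(chan struct{})
	go pumpSamples(done,
		func() chan string { return samples },
		func(s string) error { fmt.Println("wrote", s); return nil },
	)

	time.Sleep(50 * time.Millisecond)
	close(done)
}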

View File

@@ -105,8 +105,7 @@ func (ws *WebSocketHandler) Start() {
for {
channelMessage, ok := <-ws.sessions.GetSessionChannel()
if !ok {
ws.logger.Info().Str("id", channelMessage.Id).Msg("session channel was closed")
// channel closed
ws.logger.Info().Msg("session channel was closed")
return
}
@@ -194,11 +193,12 @@ func (ws *WebSocketHandler) Start() {
go func() {
for {
_, ok = <-ws.desktop.GetClipboardUpdatedChannel()
_, ok := <-ws.desktop.GetClipboardUpdatedChannel()
if !ok {
ws.logger.Info().Msg("Clipboard update channel closed")
ws.logger.Info().Msg("clipboard update channel closed")
return
}
session, ok := ws.sessions.GetHost()
if !ok {
return