adaptive fps moved to pipeline creation.
commit 4094639ea9 (parent c45a315d9b)
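
In short: the stream sink no longer keeps adaptiveFramerate/changeFramerate state and no longer rewrites framerate=N/1 in the pipeline string with a regexp. The framerate is now decided once, when the video pipeline string is built: the current screen refresh rate is used by default and capped by VideoMaxFPS when a cap is configured. A minimal sketch of that selection rule, assuming int16 rates as in the config struct (the helper name and the standalone program are illustrative, not part of the commit):

// Sketch only - not part of the commit. Mirrors the framerate selection
// performed inside the new video pipeline closure (second hunk below).
package main

import "fmt"

// selectFramerate is a hypothetical helper: it returns the screen refresh
// rate, capped by maxFPS when a positive cap is configured.
func selectFramerate(screenRate, maxFPS int16) int16 {
    fps := screenRate
    if maxFPS > 0 && maxFPS < fps {
        fps = maxFPS
    }
    return fps
}

func main() {
    fmt.Println(selectFramerate(60, 25)) // 25: capped by max_fps
    fmt.Println(selectFramerate(60, 0))  // 60: no cap, screen rate wins
}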
@@ -22,7 +22,7 @@ type CaptureManagerCtx struct {
 func New(desktop types.DesktopManager, config *config.Capture) *CaptureManagerCtx {
     logger := log.With().Str("module", "capture").Logger()

-    manager := &CaptureManagerCtx{
+    return &CaptureManagerCtx{
         logger:  logger,
         desktop: desktop,

@@ -34,13 +34,15 @@ func New(desktop types.DesktopManager, config *config.Capture) *CaptureManagerCtx {
             return NewAudioPipeline(config.AudioCodec, config.AudioDevice, config.AudioPipeline, config.AudioBitrate)
         }, "audio"),
         video: streamSinkNew(config.VideoCodec, func() (string, error) {
-            return NewVideoPipeline(config.VideoCodec, config.Display, config.VideoPipeline, config.VideoMaxFPS, config.VideoBitrate, config.VideoHWEnc)
+            // use screen fps as default
+            fps := desktop.GetScreenSize().Rate
+            // if max fps is set, cap it to that value
+            if config.VideoMaxFPS > 0 && config.VideoMaxFPS < fps {
+                fps = config.VideoMaxFPS
+            }
+            return NewVideoPipeline(config.VideoCodec, config.Display, config.VideoPipeline, fps, config.VideoBitrate, config.VideoHWEnc)
         }, "video"),
     }
-
-    manager.Video().SetAdaptiveFramerate(config.VideoAdaptiveFramerate)
-
-    return manager
 }

 func (manager *CaptureManagerCtx) Start() {
@@ -54,7 +56,7 @@ func (manager *CaptureManagerCtx) Start() {
     for {
         _, ok := <-manager.desktop.GetBeforeScreenSizeChangeChannel()
         if !ok {
-            manager.logger.Info().Msg("Before screen size change channel was closed")
+            manager.logger.Info().Msg("before screen size change channel was closed")
             return
         }

@@ -70,14 +72,13 @@ func (manager *CaptureManagerCtx) Start() {

     go func() {
         for {
-            framerate, ok := <-manager.desktop.GetAfterScreenSizeChangeChannel()
+            _, ok := <-manager.desktop.GetAfterScreenSizeChangeChannel()
             if !ok {
-                manager.logger.Info().Msg("After screen size change channel was closed")
+                manager.logger.Info().Msg("after screen size change channel was closed")
                 return
             }

             if manager.video.Started() {
-                manager.video.SetChangeFramerate(framerate)
                 err := manager.video.createPipeline()
                 if err != nil && !errors.Is(err, types.ErrCapturePipelineAlreadyExists) {
                     manager.logger.Panic().Err(err).Msg("unable to recreate video pipeline")
@@ -2,8 +2,6 @@ package capture

 import (
     "errors"
-    "regexp"
-    "strconv"
     "sync"

     "github.com/rs/zerolog"
@@ -18,16 +16,13 @@ type StreamSinkManagerCtx struct {
     logger zerolog.Logger
     mu     sync.Mutex

     codec      codec.RTPCodec
     pipeline   *gst.Pipeline
     pipelineMu sync.Mutex
     pipelineFn func() (string, error)
-    adaptiveFramerate bool

     listeners   int
     listenersMu sync.Mutex
-
-    changeFramerate int16
 }

 func streamSinkNew(codec codec.RTPCodec, pipelineFn func() (string, error), video_id string) *StreamSinkManagerCtx {
@@ -37,11 +32,9 @@ func streamSinkNew(codec codec.RTPCodec, pipelineFn func() (string, error), video_id string) *StreamSinkManagerCtx {
         Str("video_id", video_id).Logger()

     manager := &StreamSinkManagerCtx{
         logger:     logger,
         codec:      codec,
         pipelineFn: pipelineFn,
-        changeFramerate:   0,
-        adaptiveFramerate: false,
     }

     return manager
@@ -141,11 +134,6 @@ func (manager *StreamSinkManagerCtx) createPipeline() error {
         return err
     }

-    if manager.changeFramerate > 0 && manager.adaptiveFramerate {
-        m1 := regexp.MustCompile(`framerate=\d+/1`)
-        pipelineStr = m1.ReplaceAllString(pipelineStr, "framerate="+strconv.FormatInt(int64(manager.changeFramerate), 10)+"/1")
-    }
-
     manager.logger.Info().
         Str("codec", manager.codec.Name).
         Str("src", pipelineStr).
@@ -187,11 +175,3 @@ func (manager *StreamSinkManagerCtx) GetSampleChannel() chan types.Sample {

     return nil
 }
-
-func (manager *StreamSinkManagerCtx) SetChangeFramerate(rate int16) {
-    manager.changeFramerate = rate
-}
-
-func (manager *StreamSinkManagerCtx) SetAdaptiveFramerate(allow bool) {
-    manager.adaptiveFramerate = allow
-}
@@ -11,13 +11,12 @@ import (

 type Capture struct {
     // video
     Display       string
     VideoCodec    codec.RTPCodec
     VideoHWEnc    string // TODO: Pipeline builder.
     VideoBitrate  uint   // TODO: Pipeline builder.
     VideoMaxFPS   int16  // TODO: Pipeline builder.
     VideoPipeline string
-    VideoAdaptiveFramerate bool

     // audio
     AudioDevice string
@@ -192,13 +191,7 @@ func (s *Capture) Set() {
     s.VideoHWEnc = videoHWEnc

     s.VideoBitrate = viper.GetUint("video_bitrate")
-    s.VideoAdaptiveFramerate = false
     s.VideoMaxFPS = int16(viper.GetInt("max_fps"))
-    if s.VideoMaxFPS == 0 {
-        // TODO: Get the starting fps from the screen parameter.
-        s.VideoMaxFPS = 30
-        s.VideoAdaptiveFramerate = true
-    }
     s.VideoPipeline = viper.GetString("video")

     //
@@ -52,9 +52,10 @@ func (manager *DesktopManagerCtx) Start() {
     for {
         msg, ok := <-xevent.EventErrorChannel
         if !ok {
-            manager.logger.Info().Msg("Error channel was closed")
+            manager.logger.Info().Msg("xevent error channel was closed")
             return
         }

         manager.logger.Warn().
             Uint8("error_code", msg.Error_code).
             Str("message", msg.Message).
@@ -26,8 +26,6 @@ type StreamSinkManager interface {
     ListenersCount() int
     Started() bool
     GetSampleChannel() chan Sample
-    SetChangeFramerate(rate int16)
-    SetAdaptiveFramerate(allow bool)
 }

 type CaptureManager interface {
@@ -63,12 +63,13 @@ func (manager *WebRTCManager) Start() {
                 continue
             }

-            newSample, ok := <-manager.capture.Audio().GetSampleChannel()
+            sample, ok := <-manager.capture.Audio().GetSampleChannel()
             if !ok {
-                manager.logger.Info().Msg("Audio capture channel was closed")
-                return
+                manager.logger.Info().Msg("audio capture channel was closed")
+                continue // TODO: Create this goroutine when creating the pipeline.
             }
-            err := manager.audioTrack.WriteSample(media.Sample(newSample))
+
+            err := manager.audioTrack.WriteSample(media.Sample(sample))
             if err != nil && errors.Is(err, io.ErrClosedPipe) {
                 manager.logger.Warn().Err(err).Msg("audio pipeline failed to write")
             }
@@ -93,12 +94,13 @@ func (manager *WebRTCManager) Start() {
                 continue
             }

-            newSample, ok := <-manager.capture.Video().GetSampleChannel()
+            sample, ok := <-manager.capture.Video().GetSampleChannel()
             if !ok {
-                manager.logger.Info().Msg("Video capture channel was closed")
-                return
+                manager.logger.Info().Msg("video capture channel was closed")
+                continue // TODO: Create this goroutine when creating the pipeline.
             }
-            err := manager.videoTrack.WriteSample(media.Sample(newSample))
+
+            err := manager.videoTrack.WriteSample(media.Sample(sample))
             if err != nil && errors.Is(err, io.ErrClosedPipe) {
                 manager.logger.Warn().Err(err).Msg("video pipeline failed to write")
             }
@@ -105,8 +105,7 @@ func (ws *WebSocketHandler) Start() {
     for {
         channelMessage, ok := <-ws.sessions.GetSessionChannel()
         if !ok {
-            ws.logger.Info().Str("id", channelMessage.Id).Msg("session channel was closed")
-            // channel closed
+            ws.logger.Info().Msg("session channel was closed")
             return
         }

@@ -194,11 +193,12 @@ func (ws *WebSocketHandler) Start() {

     go func() {
         for {
-            _, ok = <-ws.desktop.GetClipboardUpdatedChannel()
+            _, ok := <-ws.desktop.GetClipboardUpdatedChannel()
             if !ok {
-                ws.logger.Info().Msg("Clipboard update channel closed")
+                ws.logger.Info().Msg("clipboard update channel closed")
                 return
             }

             session, ok := ws.sessions.GetHost()
             if !ok {
                 return