package config

import (
	"os"

	"github.com/pion/webrtc/v3"
	"github.com/rs/zerolog/log"
	"github.com/spf13/cobra"
	"github.com/spf13/viper"

	"demodesk/neko/internal/types"
	"demodesk/neko/internal/types/codec"
	"demodesk/neko/internal/utils"
)

// Capture is the configuration for video/audio capture, broadcasting and screencasting.
type Capture struct {
	Display string

	VideoCodec     codec.RTPCodec
	VideoIDs       []string
	VideoPipelines map[string]types.VideoConfig

	AudioDevice   string
	AudioCodec    codec.RTPCodec
	AudioPipeline string

	BroadcastAudioBitrate int
	BroadcastVideoBitrate int
	BroadcastPreset       string
	BroadcastPipeline     string

	ScreencastEnabled  bool
	ScreencastRate     string
	ScreencastQuality  string
	ScreencastPipeline string
}

// Init registers the capture-related command line flags and binds them to viper.
func (Capture) Init(cmd *cobra.Command) error {
	// audio
	cmd.PersistentFlags().String("capture.audio.device", "audio_output.monitor", "audio device to capture")
	if err := viper.BindPFlag("capture.audio.device", cmd.PersistentFlags().Lookup("capture.audio.device")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.audio.codec", "opus", "audio codec to be used")
	if err := viper.BindPFlag("capture.audio.codec", cmd.PersistentFlags().Lookup("capture.audio.codec")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.audio.pipeline", "", "gstreamer pipeline used for audio streaming")
	if err := viper.BindPFlag("capture.audio.pipeline", cmd.PersistentFlags().Lookup("capture.audio.pipeline")); err != nil {
		return err
	}

	// videos
	cmd.PersistentFlags().String("capture.video.codec", "vp8", "video codec to be used")
	if err := viper.BindPFlag("capture.video.codec", cmd.PersistentFlags().Lookup("capture.video.codec")); err != nil {
		return err
	}

	cmd.PersistentFlags().StringSlice("capture.video.ids", []string{}, "ordered list of video ids")
	if err := viper.BindPFlag("capture.video.ids", cmd.PersistentFlags().Lookup("capture.video.ids")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.video.pipelines", "", "pipelines config in JSON used for video streaming")
	if err := viper.BindPFlag("capture.video.pipelines", cmd.PersistentFlags().Lookup("capture.video.pipelines")); err != nil {
		return err
	}
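
	// Illustrative example only: "capture.video.pipelines" carries a JSON object
	// mapping each id from "capture.video.ids" to a types.VideoConfig entry, e.g.
	//
	//	{"main": {"gst_pipeline": "ximagesrc display-name={display} ! ... ! appsink name=appsink"}}
	//
	// The "gst_pipeline" key is an assumption about the tags on types.VideoConfig;
	// the actual key names are defined there.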

	// broadcast
	cmd.PersistentFlags().Int("capture.broadcast.audio_bitrate", 128, "broadcast audio bitrate in KB/s")
	if err := viper.BindPFlag("capture.broadcast.audio_bitrate", cmd.PersistentFlags().Lookup("capture.broadcast.audio_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("capture.broadcast.video_bitrate", 4096, "broadcast video bitrate in KB/s")
	if err := viper.BindPFlag("capture.broadcast.video_bitrate", cmd.PersistentFlags().Lookup("capture.broadcast.video_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.broadcast.preset", "veryfast", "broadcast speed preset for h264 encoding")
	if err := viper.BindPFlag("capture.broadcast.preset", cmd.PersistentFlags().Lookup("capture.broadcast.preset")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.broadcast.pipeline", "", "gstreamer pipeline used for broadcasting")
	if err := viper.BindPFlag("capture.broadcast.pipeline", cmd.PersistentFlags().Lookup("capture.broadcast.pipeline")); err != nil {
		return err
	}

	// screencast
	cmd.PersistentFlags().Bool("capture.screencast.enabled", false, "enable screencast")
	if err := viper.BindPFlag("capture.screencast.enabled", cmd.PersistentFlags().Lookup("capture.screencast.enabled")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.rate", "10/1", "screencast frame rate")
	if err := viper.BindPFlag("capture.screencast.rate", cmd.PersistentFlags().Lookup("capture.screencast.rate")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.quality", "60", "screencast JPEG quality")
	if err := viper.BindPFlag("capture.screencast.quality", cmd.PersistentFlags().Lookup("capture.screencast.quality")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.pipeline", "", "gstreamer pipeline used for screencasting")
	if err := viper.BindPFlag("capture.screencast.pipeline", cmd.PersistentFlags().Lookup("capture.screencast.pipeline")); err != nil {
		return err
	}

	return nil
}

// Set loads the Capture configuration values from viper.
func (s *Capture) Set() {
	var ok bool

	// Display is provided by the DISPLAY environment variable
	s.Display = os.Getenv("DISPLAY")

	// video
	videoCodec := viper.GetString("capture.video.codec")
	s.VideoCodec, ok = codec.ParseStr(videoCodec)
	if !ok || s.VideoCodec.Type != webrtc.RTPCodecTypeVideo {
		log.Warn().Str("codec", videoCodec).Msgf("unknown video codec, using VP8")
		s.VideoCodec = codec.VP8()
	}

	s.VideoIDs = viper.GetStringSlice("capture.video.ids")
	if err := viper.UnmarshalKey("capture.video.pipelines", &s.VideoPipelines, viper.DecodeHook(
		utils.JsonStringAutoDecode(s.VideoPipelines),
	)); err != nil {
		log.Warn().Err(err).Msgf("unable to parse video pipelines")
	}

	// default video
	if len(s.VideoPipelines) == 0 {
		log.Warn().Msgf("no video pipelines specified, using defaults")

		s.VideoCodec = codec.VP8()
		s.VideoPipelines = map[string]types.VideoConfig{
			"main": {
				GstPipeline: "ximagesrc display-name={display} show-pointer=false use-damage=false " +
					"! video/x-raw " +
					"! videoconvert " +
					"! queue " +
					"! vp8enc end-usage=cbr cpu-used=4 threads=4 deadline=1 keyframe-max-dist=25 " +
					"! appsink name=appsink",
			},
		}
		s.VideoIDs = []string{"main"}
	}
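
	// Note: the {display} placeholder in the default GstPipeline is expected to be
	// substituted with s.Display (taken from the DISPLAY env variable above) by the
	// capture module consuming this config (assumption; the substitution does not
	// happen in this file).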

	// audio
	s.AudioDevice = viper.GetString("capture.audio.device")
	s.AudioPipeline = viper.GetString("capture.audio.pipeline")

	audioCodec := viper.GetString("capture.audio.codec")
	s.AudioCodec, ok = codec.ParseStr(audioCodec)
	if !ok || s.AudioCodec.Type != webrtc.RTPCodecTypeAudio {
		log.Warn().Str("codec", audioCodec).Msgf("unknown audio codec, using Opus")
		s.AudioCodec = codec.Opus()
	}

	// broadcast
	s.BroadcastAudioBitrate = viper.GetInt("capture.broadcast.audio_bitrate")
	s.BroadcastVideoBitrate = viper.GetInt("capture.broadcast.video_bitrate")
	s.BroadcastPreset = viper.GetString("capture.broadcast.preset")
	s.BroadcastPipeline = viper.GetString("capture.broadcast.pipeline")

	// screencast
	s.ScreencastEnabled = viper.GetBool("capture.screencast.enabled")
	s.ScreencastRate = viper.GetString("capture.screencast.rate")
	s.ScreencastQuality = viper.GetString("capture.screencast.quality")
	s.ScreencastPipeline = viper.GetString("capture.screencast.pipeline")
}
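
// Usage sketch (illustrative only, not part of this package): a root command
// would typically call Init while building the CLI and Set after viper has read
// its flag, env and file sources. Names such as rootCmd and the surrounding
// wiring are hypothetical.
//
//	capture := Capture{}
//	if err := capture.Init(rootCmd); err != nil {
//		log.Err(err).Msg("unable to bind capture flags")
//	}
//	cobra.OnInitialize(func() {
//		capture.Set()
//	})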