package config

import (
	"os"
	"strings"

	"github.com/pion/webrtc/v3"
	"github.com/rs/zerolog/log"
	"github.com/spf13/cobra"
	"github.com/spf13/viper"

	"github.com/demodesk/neko/pkg/types"
	"github.com/demodesk/neko/pkg/types/codec"
	"github.com/demodesk/neko/pkg/utils"
)

// HwEnc identifies the hardware encoder selected by the legacy (v2) capture
// configuration.
type HwEnc int

// Hardware encoder options supported by the legacy capture configuration.
const (
	HwEncUnset HwEnc = iota
	HwEncNone
	HwEncVAAPI
	HwEncNVENC
)
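
// Capture holds the configuration of all media capture sources: the X display,
// the WebRTC video and audio streams, broadcasting, screencasting, and the
// optional webcam and microphone inputs.
//
// A minimal usage sketch (assumed wiring, not part of this file): register the
// flags on a cobra command, let cobra/viper resolve flag, env and config file
// values, then copy them into the struct:
//
//	cmd := &cobra.Command{Use: "neko"} // hypothetical root command
//	capture := config.Capture{}
//	if err := capture.Init(cmd); err != nil {
//		log.Fatal().Err(err).Msg("unable to register capture flags")
//	}
//	// ... after cobra/viper have parsed flags, env and the config file ...
//	capture.Set()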
type Capture struct {
	Display string

	VideoCodec     codec.RTPCodec
	VideoIDs       []string
	VideoPipelines map[string]types.VideoConfig

	AudioDevice   string
	AudioCodec    codec.RTPCodec
	AudioPipeline string

	BroadcastAudioBitrate int
	BroadcastVideoBitrate int
	BroadcastPreset       string
	BroadcastPipeline     string
	BroadcastUrl          string
	BroadcastAutostart    bool

	ScreencastEnabled  bool
	ScreencastRate     string
	ScreencastQuality  string
	ScreencastPipeline string

	WebcamEnabled bool
	WebcamDevice  string
	WebcamWidth   int
	WebcamHeight  int

	MicrophoneEnabled bool
	MicrophoneDevice  string
}
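
// Init registers the capture-related command line flags and binds each of them
// to its corresponding viper key.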
func (Capture) Init(cmd *cobra.Command) error {
	// audio
	cmd.PersistentFlags().String("capture.audio.device", "audio_output.monitor", "pulseaudio device to capture")
	if err := viper.BindPFlag("capture.audio.device", cmd.PersistentFlags().Lookup("capture.audio.device")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.audio.codec", "opus", "audio codec to be used")
	if err := viper.BindPFlag("capture.audio.codec", cmd.PersistentFlags().Lookup("capture.audio.codec")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.audio.pipeline", "", "gstreamer pipeline used for audio streaming")
	if err := viper.BindPFlag("capture.audio.pipeline", cmd.PersistentFlags().Lookup("capture.audio.pipeline")); err != nil {
		return err
	}

	// videos
	cmd.PersistentFlags().String("capture.video.codec", "vp8", "video codec to be used")
	if err := viper.BindPFlag("capture.video.codec", cmd.PersistentFlags().Lookup("capture.video.codec")); err != nil {
		return err
	}

	cmd.PersistentFlags().StringSlice("capture.video.ids", []string{}, "ordered list of video ids")
	if err := viper.BindPFlag("capture.video.ids", cmd.PersistentFlags().Lookup("capture.video.ids")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.video.pipelines", "[]", "pipelines config in JSON used for video streaming")
	if err := viper.BindPFlag("capture.video.pipelines", cmd.PersistentFlags().Lookup("capture.video.pipelines")); err != nil {
		return err
	}

	// broadcast
	cmd.PersistentFlags().Int("capture.broadcast.audio_bitrate", 128, "broadcast audio bitrate in KB/s")
	if err := viper.BindPFlag("capture.broadcast.audio_bitrate", cmd.PersistentFlags().Lookup("capture.broadcast.audio_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("capture.broadcast.video_bitrate", 4096, "broadcast video bitrate in KB/s")
	if err := viper.BindPFlag("capture.broadcast.video_bitrate", cmd.PersistentFlags().Lookup("capture.broadcast.video_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.broadcast.preset", "veryfast", "broadcast speed preset for h264 encoding")
	if err := viper.BindPFlag("capture.broadcast.preset", cmd.PersistentFlags().Lookup("capture.broadcast.preset")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.broadcast.pipeline", "", "gstreamer pipeline used for broadcasting")
	if err := viper.BindPFlag("capture.broadcast.pipeline", cmd.PersistentFlags().Lookup("capture.broadcast.pipeline")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.broadcast.url", "", "initial URL for broadcasting, setting this value will automatically start broadcasting")
	if err := viper.BindPFlag("capture.broadcast.url", cmd.PersistentFlags().Lookup("capture.broadcast.url")); err != nil {
		return err
	}

	cmd.PersistentFlags().Bool("capture.broadcast.autostart", true, "automatically start broadcasting when neko starts and broadcast_url is set")
	if err := viper.BindPFlag("capture.broadcast.autostart", cmd.PersistentFlags().Lookup("capture.broadcast.autostart")); err != nil {
		return err
	}

	// screencast
	cmd.PersistentFlags().Bool("capture.screencast.enabled", false, "enable screencast")
	if err := viper.BindPFlag("capture.screencast.enabled", cmd.PersistentFlags().Lookup("capture.screencast.enabled")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.rate", "10/1", "screencast frame rate")
	if err := viper.BindPFlag("capture.screencast.rate", cmd.PersistentFlags().Lookup("capture.screencast.rate")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.quality", "60", "screencast JPEG quality")
	if err := viper.BindPFlag("capture.screencast.quality", cmd.PersistentFlags().Lookup("capture.screencast.quality")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.screencast.pipeline", "", "gstreamer pipeline used for screencasting")
	if err := viper.BindPFlag("capture.screencast.pipeline", cmd.PersistentFlags().Lookup("capture.screencast.pipeline")); err != nil {
		return err
	}

	// webcam
	cmd.PersistentFlags().Bool("capture.webcam.enabled", false, "enable webcam stream")
	if err := viper.BindPFlag("capture.webcam.enabled", cmd.PersistentFlags().Lookup("capture.webcam.enabled")); err != nil {
		return err
	}

	// the webcam device is expected to be a v4l2loopback device; typical host setup:
	//   sudo apt install v4l2loopback-dkms v4l2loopback-utils
	//   sudo apt-get install linux-headers-`uname -r` linux-modules-extra-`uname -r`
	//   sudo modprobe v4l2loopback exclusive_caps=1
	cmd.PersistentFlags().String("capture.webcam.device", "/dev/video0", "v4l2sink device used for webcam")
	if err := viper.BindPFlag("capture.webcam.device", cmd.PersistentFlags().Lookup("capture.webcam.device")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("capture.webcam.width", 1280, "webcam stream width")
	if err := viper.BindPFlag("capture.webcam.width", cmd.PersistentFlags().Lookup("capture.webcam.width")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("capture.webcam.height", 720, "webcam stream height")
	if err := viper.BindPFlag("capture.webcam.height", cmd.PersistentFlags().Lookup("capture.webcam.height")); err != nil {
		return err
	}

	// microphone
	cmd.PersistentFlags().Bool("capture.microphone.enabled", true, "enable microphone stream")
	if err := viper.BindPFlag("capture.microphone.enabled", cmd.PersistentFlags().Lookup("capture.microphone.enabled")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("capture.microphone.device", "audio_input", "pulseaudio device used for microphone")
	if err := viper.BindPFlag("capture.microphone.device", cmd.PersistentFlags().Lookup("capture.microphone.device")); err != nil {
		return err
	}

	return nil
}
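
// InitV2 registers the legacy (v2) command line flags, kept only for backwards
// compatibility, and binds them to viper.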
func (Capture) InitV2(cmd *cobra.Command) error {
	cmd.PersistentFlags().String("display", "", "X display to capture")
	if err := viper.BindPFlag("display", cmd.PersistentFlags().Lookup("display")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("video_codec", "", "video codec to be used")
	if err := viper.BindPFlag("video_codec", cmd.PersistentFlags().Lookup("video_codec")); err != nil {
		return err
	}

	// DEPRECATED: video codec
	cmd.PersistentFlags().Bool("vp8", false, "DEPRECATED: use video_codec")
	if err := viper.BindPFlag("vp8", cmd.PersistentFlags().Lookup("vp8")); err != nil {
		return err
	}

	// DEPRECATED: video codec
	cmd.PersistentFlags().Bool("vp9", false, "DEPRECATED: use video_codec")
	if err := viper.BindPFlag("vp9", cmd.PersistentFlags().Lookup("vp9")); err != nil {
		return err
	}

	// DEPRECATED: video codec
	cmd.PersistentFlags().Bool("av1", false, "DEPRECATED: use video_codec")
	if err := viper.BindPFlag("av1", cmd.PersistentFlags().Lookup("av1")); err != nil {
		return err
	}

	// DEPRECATED: video codec
	cmd.PersistentFlags().Bool("h264", false, "DEPRECATED: use video_codec")
	if err := viper.BindPFlag("h264", cmd.PersistentFlags().Lookup("h264")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("hwenc", "", "use hardware accelerated encoding")
	if err := viper.BindPFlag("hwenc", cmd.PersistentFlags().Lookup("hwenc")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("video_bitrate", 0, "video bitrate in kbit/s")
	if err := viper.BindPFlag("video_bitrate", cmd.PersistentFlags().Lookup("video_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("max_fps", 0, "maximum fps delivered via WebRTC, 0 is for no maximum")
	if err := viper.BindPFlag("max_fps", cmd.PersistentFlags().Lookup("max_fps")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("video", "", "video codec parameters to use for streaming")
	if err := viper.BindPFlag("video", cmd.PersistentFlags().Lookup("video")); err != nil {
		return err
	}

	//
	// audio
	//

	cmd.PersistentFlags().String("device", "", "audio device to capture")
	if err := viper.BindPFlag("device", cmd.PersistentFlags().Lookup("device")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("audio_codec", "", "audio codec to be used")
	if err := viper.BindPFlag("audio_codec", cmd.PersistentFlags().Lookup("audio_codec")); err != nil {
		return err
	}

	// DEPRECATED: audio codec
	cmd.PersistentFlags().Bool("opus", false, "DEPRECATED: use audio_codec")
	if err := viper.BindPFlag("opus", cmd.PersistentFlags().Lookup("opus")); err != nil {
		return err
	}

	// DEPRECATED: audio codec
	cmd.PersistentFlags().Bool("g722", false, "DEPRECATED: use audio_codec")
	if err := viper.BindPFlag("g722", cmd.PersistentFlags().Lookup("g722")); err != nil {
		return err
	}

	// DEPRECATED: audio codec
	cmd.PersistentFlags().Bool("pcmu", false, "DEPRECATED: use audio_codec")
	if err := viper.BindPFlag("pcmu", cmd.PersistentFlags().Lookup("pcmu")); err != nil {
		return err
	}

	// DEPRECATED: audio codec
	cmd.PersistentFlags().Bool("pcma", false, "DEPRECATED: use audio_codec")
	if err := viper.BindPFlag("pcma", cmd.PersistentFlags().Lookup("pcma")); err != nil {
		return err
	}

	cmd.PersistentFlags().Int("audio_bitrate", 0, "audio bitrate in kbit/s")
	if err := viper.BindPFlag("audio_bitrate", cmd.PersistentFlags().Lookup("audio_bitrate")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("audio", "", "audio codec parameters to use for streaming")
	if err := viper.BindPFlag("audio", cmd.PersistentFlags().Lookup("audio")); err != nil {
		return err
	}

	//
	// broadcast
	//

	cmd.PersistentFlags().String("broadcast_pipeline", "", "custom gst pipeline used for broadcasting, strings {url} {device} {display} will be replaced")
	if err := viper.BindPFlag("broadcast_pipeline", cmd.PersistentFlags().Lookup("broadcast_pipeline")); err != nil {
		return err
	}

	cmd.PersistentFlags().String("broadcast_url", "", "a default URL for broadcast streams, can be disabled/changed later by admins in the GUI")
	if err := viper.BindPFlag("broadcast_url", cmd.PersistentFlags().Lookup("broadcast_url")); err != nil {
		return err
	}

	cmd.PersistentFlags().Bool("broadcast_autostart", false, "automatically start broadcasting when neko starts and broadcast_url is set")
	if err := viper.BindPFlag("broadcast_autostart", cmd.PersistentFlags().Lookup("broadcast_autostart")); err != nil {
		return err
	}

	return nil
}
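
// Set copies the resolved capture configuration from viper into the struct,
// falling back to sane defaults for unknown codecs and missing video pipelines.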
func (s *Capture) Set() {
	var ok bool

	// display is provided by the DISPLAY environment variable
	s.Display = os.Getenv("DISPLAY")

	// video
	videoCodec := viper.GetString("capture.video.codec")
	s.VideoCodec, ok = codec.ParseStr(videoCodec)
	if !ok || !s.VideoCodec.IsVideo() {
		log.Warn().Str("codec", videoCodec).Msgf("unknown video codec, using VP8")
		s.VideoCodec = codec.VP8()
	}

	s.VideoIDs = viper.GetStringSlice("capture.video.ids")
	if err := viper.UnmarshalKey("capture.video.pipelines", &s.VideoPipelines, viper.DecodeHook(
		utils.JsonStringAutoDecode(s.VideoPipelines),
	)); err != nil {
		log.Warn().Err(err).Msgf("unable to parse video pipelines")
	}
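
	// Illustrative shape of the capture.video.pipelines JSON (the key names are
	// an assumption; the authoritative field names live in types.VideoConfig):
	//   {"main": {"fps": "25", "gst_encoder": "vp8enc", "gst_params": {"target-bitrate": "3072000"}}}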

	// default video
	if len(s.VideoPipelines) == 0 {
		log.Warn().Msgf("no video pipelines specified, using defaults")

		s.VideoCodec = codec.VP8()
		s.VideoPipelines = map[string]types.VideoConfig{
			"main": {
				Fps:        "25",
				GstEncoder: "vp8enc",
				GstParams: map[string]string{
					"target-bitrate":      "round(3072 * 650)",
					"cpu-used":            "4",
					"end-usage":           "cbr",
					"threads":             "4",
					"deadline":            "1",
					"undershoot":          "95",
					"buffer-size":         "(3072 * 4)",
					"buffer-initial-size": "(3072 * 2)",
					"buffer-optimal-size": "(3072 * 3)",
					"keyframe-max-dist":   "25",
					"min-quantizer":       "4",
					"max-quantizer":       "20",
				},
			},
		}
		s.VideoIDs = []string{"main"}
	}

	// audio
	s.AudioDevice = viper.GetString("capture.audio.device")
	s.AudioPipeline = viper.GetString("capture.audio.pipeline")

	audioCodec := viper.GetString("capture.audio.codec")
	s.AudioCodec, ok = codec.ParseStr(audioCodec)
	if !ok || !s.AudioCodec.IsAudio() {
		log.Warn().Str("codec", audioCodec).Msgf("unknown audio codec, using Opus")
		s.AudioCodec = codec.Opus()
	}

	// broadcast
	s.BroadcastAudioBitrate = viper.GetInt("capture.broadcast.audio_bitrate")
	s.BroadcastVideoBitrate = viper.GetInt("capture.broadcast.video_bitrate")
	s.BroadcastPreset = viper.GetString("capture.broadcast.preset")
	s.BroadcastPipeline = viper.GetString("capture.broadcast.pipeline")
	s.BroadcastUrl = viper.GetString("capture.broadcast.url")
	s.BroadcastAutostart = viper.GetBool("capture.broadcast.autostart")

	// screencast
	s.ScreencastEnabled = viper.GetBool("capture.screencast.enabled")
	s.ScreencastRate = viper.GetString("capture.screencast.rate")
	s.ScreencastQuality = viper.GetString("capture.screencast.quality")
	s.ScreencastPipeline = viper.GetString("capture.screencast.pipeline")

	// webcam
	s.WebcamEnabled = viper.GetBool("capture.webcam.enabled")
	s.WebcamDevice = viper.GetString("capture.webcam.device")
	s.WebcamWidth = viper.GetInt("capture.webcam.width")
	s.WebcamHeight = viper.GetInt("capture.webcam.height")

	// microphone
	s.MicrophoneEnabled = viper.GetBool("capture.microphone.enabled")
	s.MicrophoneDevice = viper.GetString("capture.microphone.device")
}
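
// SetV2 applies the legacy (v2) configuration values from viper, emitting a
// deprecation warning for every v2 option that is still in use.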
func (s *Capture) SetV2() {
	var ok bool

	//
	// video
	//

	if display := viper.GetString("display"); display != "" {
		s.Display = display
		log.Warn().Msg("you are using v2 configuration 'NEKO_DISPLAY' which is deprecated, please use 'DISPLAY' instead")
	}

	if videoCodec := viper.GetString("video_codec"); videoCodec != "" {
		s.VideoCodec, ok = codec.ParseStr(videoCodec)
		if !ok || s.VideoCodec.Type != webrtc.RTPCodecTypeVideo {
			log.Warn().Str("codec", videoCodec).Msgf("unknown video codec, using VP8")
			s.VideoCodec = codec.VP8()
		}
		log.Warn().Msg("you are using v2 configuration 'NEKO_VIDEO_CODEC' which is deprecated, please use 'NEKO_CAPTURE_VIDEO_CODEC' instead")
	}

	if viper.GetBool("vp8") {
		s.VideoCodec = codec.VP8()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_VP8=true', use 'NEKO_CAPTURE_VIDEO_CODEC=vp8' instead")
	} else if viper.GetBool("vp9") {
		s.VideoCodec = codec.VP9()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_VP9=true', use 'NEKO_CAPTURE_VIDEO_CODEC=vp9' instead")
	} else if viper.GetBool("h264") {
		s.VideoCodec = codec.H264()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_H264=true', use 'NEKO_CAPTURE_VIDEO_CODEC=h264' instead")
	} else if viper.GetBool("av1") {
		s.VideoCodec = codec.AV1()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_AV1=true', use 'NEKO_CAPTURE_VIDEO_CODEC=av1' instead")
	}

	videoHWEnc := HwEncUnset
	if hwenc := strings.ToLower(viper.GetString("hwenc")); hwenc != "" {
		switch hwenc {
		case "none":
			videoHWEnc = HwEncNone
		case "vaapi":
			videoHWEnc = HwEncVAAPI
		case "nvenc":
			videoHWEnc = HwEncNVENC
		default:
			log.Warn().Str("hwenc", hwenc).Msgf("unknown video hw encoder, using CPU")
		}
	}

	videoBitrate := viper.GetUint("video_bitrate")
	videoMaxFPS := int16(viper.GetInt("max_fps"))
	videoPipeline := viper.GetString("video")

	// video pipeline
	if videoHWEnc != HwEncUnset || videoBitrate != 0 || videoMaxFPS != 0 || videoPipeline != "" {
		pipeline, err := NewVideoPipeline(s.VideoCodec, s.Display, videoPipeline, videoMaxFPS, videoBitrate, videoHWEnc)
		if err != nil {
			log.Warn().Err(err).Msg("unable to create video pipeline, using default")
		} else {
			s.VideoPipelines = map[string]types.VideoConfig{
				"main": {
					GstPipeline: pipeline,
				},
			}
			// TODO: add deprecated warning and proper alternative
		}
	}

	//
	// audio
	//

	if audioDevice := viper.GetString("device"); audioDevice != "" {
		s.AudioDevice = audioDevice
		log.Warn().Msg("you are using v2 configuration 'NEKO_DEVICE' which is deprecated, please use 'NEKO_CAPTURE_AUDIO_DEVICE' instead")
	}

	if audioCodec := viper.GetString("audio_codec"); audioCodec != "" {
		s.AudioCodec, ok = codec.ParseStr(audioCodec)
		if !ok || s.AudioCodec.Type != webrtc.RTPCodecTypeAudio {
			log.Warn().Str("codec", audioCodec).Msgf("unknown audio codec, using Opus")
			s.AudioCodec = codec.Opus()
		}
		log.Warn().Msg("you are using v2 configuration 'NEKO_AUDIO_CODEC' which is deprecated, please use 'NEKO_CAPTURE_AUDIO_CODEC' instead")
	}

	if viper.GetBool("opus") {
		s.AudioCodec = codec.Opus()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_OPUS=true', use 'NEKO_CAPTURE_AUDIO_CODEC=opus' instead")
	} else if viper.GetBool("g722") {
		s.AudioCodec = codec.G722()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_G722=true', use 'NEKO_CAPTURE_AUDIO_CODEC=g722' instead")
	} else if viper.GetBool("pcmu") {
		s.AudioCodec = codec.PCMU()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_PCMU=true', use 'NEKO_CAPTURE_AUDIO_CODEC=pcmu' instead")
	} else if viper.GetBool("pcma") {
		s.AudioCodec = codec.PCMA()
		log.Warn().Msg("you are using deprecated config setting 'NEKO_PCMA=true', use 'NEKO_CAPTURE_AUDIO_CODEC=pcma' instead")
	}

	audioBitrate := viper.GetUint("audio_bitrate")
	audioPipeline := viper.GetString("audio")

	// audio pipeline
	if audioBitrate != 0 || audioPipeline != "" {
		pipeline, err := NewAudioPipeline(s.AudioCodec, s.AudioDevice, audioPipeline, audioBitrate)
		if err != nil {
			log.Warn().Err(err).Msg("unable to create audio pipeline, using default")
		} else {
			s.AudioPipeline = pipeline
		}
		// TODO: add deprecated warning and proper alternative
	}

	//
	// broadcast
	//

	if viper.IsSet("broadcast_pipeline") {
		s.BroadcastPipeline = viper.GetString("broadcast_pipeline")
		log.Warn().Msg("you are using v2 configuration 'NEKO_BROADCAST_PIPELINE' which is deprecated, please use 'NEKO_CAPTURE_BROADCAST_PIPELINE' instead")
	}
	if viper.IsSet("broadcast_url") {
		s.BroadcastUrl = viper.GetString("broadcast_url")
		log.Warn().Msg("you are using v2 configuration 'NEKO_BROADCAST_URL' which is deprecated, please use 'NEKO_CAPTURE_BROADCAST_URL' instead")
	}
	if viper.IsSet("broadcast_autostart") {
		s.BroadcastAutostart = viper.GetBool("broadcast_autostart")
		log.Warn().Msg("you are using v2 configuration 'NEKO_BROADCAST_AUTOSTART' which is deprecated, please use 'NEKO_CAPTURE_BROADCAST_AUTOSTART' instead")
	}
}