Browse Source

Add native support for the raspberry pi camera (#1057)

pull/1128/head
Alessandro Ros 3 years ago committed by aler9
parent
commit
8c2b12a314
  1. 3
      .dockerignore
  2. 86
      Makefile
  3. 36
      README.md
  4. 8
      apidocs/openapi.yaml
  5. 13
      internal/conf/env_test.go
  6. 31
      internal/conf/path.go
  7. 8
      internal/core/api.go
  8. 2
      internal/core/hls_source.go
  9. 8
      internal/core/path.go
  10. 94
      internal/core/rpicamera_source.go
  11. 2
      internal/core/rtmp_source.go
  12. 2
      internal/core/rtsp_source.go
  13. 70
      internal/core/source_static.go
  14. 85
      internal/rpicamera/embeddedexe.go
  15. 40
      internal/rpicamera/exe/Makefile
  16. 209
      internal/rpicamera/exe/camera.cpp
  17. 17
      internal/rpicamera/exe/camera.h
  18. 315
      internal/rpicamera/exe/encoder.c
  19. 7
      internal/rpicamera/exe/encoder.h
  20. 117
      internal/rpicamera/exe/main.c
  21. 36
      internal/rpicamera/exe/parameters.c
  22. 22
      internal/rpicamera/exe/parameters.h
  23. 13
      internal/rpicamera/params.go
  24. 69
      internal/rpicamera/pipe.go
  25. 134
      internal/rpicamera/rpicamera.go
  26. 23
      internal/rpicamera/rpicamera_disabled.go
  27. 11
      rtsp-simple-server.yml

3
.dockerignore

@ -1,3 +1,6 @@ @@ -1,3 +1,6 @@
# do not add .git, since it is needed to extract the tag
/tmp
/release
/coverage*.txt
/apidocs/*.html

86
Makefile

@ -2,6 +2,8 @@ @@ -2,6 +2,8 @@
BASE_IMAGE = golang:1.18-alpine3.15
LINT_IMAGE = golangci/golangci-lint:v1.45.2
NODE_IMAGE = node:16-alpine3.15
RPI32_IMAGE = balenalib/raspberrypi3:buster-run
RPI64_IMAGE = balenalib/raspberrypi3-64:buster-run
.PHONY: $(shell ls)
@ -162,17 +164,6 @@ apidocs-lint: @@ -162,17 +164,6 @@ apidocs-lint:
docker run --rm -v $(PWD)/apidocs:/s -w /s temp \
sh -c "openapi lint openapi.yaml"
define DOCKERFILE_RELEASE
FROM $(BASE_IMAGE)
RUN apk add --no-cache zip make git tar
WORKDIR /s
COPY go.mod go.sum ./
RUN go mod download
COPY . ./
RUN make release-nodocker
endef
export DOCKERFILE_RELEASE
define DOCKERFILE_APIDOCS_GEN
FROM $(NODE_IMAGE)
RUN yarn global add redoc-cli@0.13.7
@ -184,36 +175,63 @@ apidocs-gen: @@ -184,36 +175,63 @@ apidocs-gen:
docker run --rm -v $(PWD)/apidocs:/s -w /s temp \
sh -c "redoc-cli bundle openapi.yaml"
release:
echo "$$DOCKERFILE_RELEASE" | docker build . -f - -t temp
docker run --rm -v $(PWD):/out \
temp sh -c "rm -rf /out/release && cp -r /s/release /out/"
define DOCKERFILE_RELEASE
FROM $(RPI32_IMAGE) AS rpicamera32
RUN ["cross-build-start"]
RUN apt update && apt install -y g++ pkg-config make libcamera-dev
WORKDIR /s/internal/rpicamera
COPY internal/rpicamera .
RUN cd exe && make
FROM $(RPI64_IMAGE) AS rpicamera64
RUN ["cross-build-start"]
RUN apt update && apt install -y g++ pkg-config make libcamera-dev
WORKDIR /s/internal/rpicamera
COPY internal/rpicamera .
RUN cd exe && make
release-nodocker:
$(eval export CGO_ENABLED=0)
$(eval VERSION := $(shell git describe --tags))
$(eval GOBUILD := go build -ldflags '-X github.com/aler9/rtsp-simple-server/internal/core.version=$(VERSION)')
rm -rf tmp && mkdir tmp
rm -rf release && mkdir release
cp rtsp-simple-server.yml tmp/
FROM $(BASE_IMAGE)
RUN apk add --no-cache zip make git tar
WORKDIR /s
COPY go.mod go.sum ./
RUN go mod download
COPY . ./
GOOS=windows GOARCH=amd64 $(GOBUILD) -o tmp/rtsp-simple-server.exe
cd tmp && zip -q $(PWD)/release/rtsp-simple-server_$(VERSION)_windows_amd64.zip rtsp-simple-server.exe rtsp-simple-server.yml
ENV VERSION $(shell git describe --tags)
ENV CGO_ENABLED 0
RUN mkdir tmp release
RUN cp rtsp-simple-server.yml tmp/
GOOS=linux GOARCH=amd64 $(GOBUILD) -o tmp/rtsp-simple-server
tar -C tmp -czf $(PWD)/release/rtsp-simple-server_$(VERSION)_linux_amd64.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN GOOS=windows GOARCH=amd64 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server.exe
RUN cd tmp && zip -q ../release/rtsp-simple-server_$${VERSION}_windows_amd64.zip rtsp-simple-server.exe rtsp-simple-server.yml
GOOS=linux GOARCH=arm GOARM=6 $(GOBUILD) -o tmp/rtsp-simple-server
tar -C tmp -czf $(PWD)/release/rtsp-simple-server_$(VERSION)_linux_armv6.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN GOOS=linux GOARCH=amd64 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server
RUN tar -C tmp -czf release/rtsp-simple-server_$${VERSION}_linux_amd64.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
GOOS=linux GOARCH=arm GOARM=7 $(GOBUILD) -o tmp/rtsp-simple-server
tar -C tmp -czf $(PWD)/release/rtsp-simple-server_$(VERSION)_linux_armv7.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN GOOS=darwin GOARCH=amd64 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server
RUN tar -C tmp -czf release/rtsp-simple-server_$${VERSION}_darwin_amd64.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
GOOS=linux GOARCH=arm64 $(GOBUILD) -o tmp/rtsp-simple-server
tar -C tmp -czf $(PWD)/release/rtsp-simple-server_$(VERSION)_linux_arm64v8.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
COPY --from=rpicamera32 /s/internal/rpicamera/exe/exe internal/rpicamera/exe/
RUN GOOS=linux GOARCH=arm GOARM=6 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server -tags rpicamera
RUN tar -C tmp -czf release/rtsp-simple-server_$${VERSION}_linux_armv6.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN rm internal/rpicamera/exe/exe
GOOS=darwin GOARCH=amd64 $(GOBUILD) -o tmp/rtsp-simple-server
tar -C tmp -czf $(PWD)/release/rtsp-simple-server_$(VERSION)_darwin_amd64.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
COPY --from=rpicamera32 /s/internal/rpicamera/exe/exe internal/rpicamera/exe/
RUN GOOS=linux GOARCH=arm GOARM=7 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server -tags rpicamera
RUN tar -C tmp -czf release/rtsp-simple-server_$${VERSION}_linux_armv7.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN rm internal/rpicamera/exe/exe
COPY --from=rpicamera64 /s/internal/rpicamera/exe/exe internal/rpicamera/exe/
RUN GOOS=linux GOARCH=arm64 go build -ldflags "-X github.com/aler9/rtsp-simple-server/internal/core.version=$$VERSION" -o tmp/rtsp-simple-server -tags rpicamera
RUN tar -C tmp -czf release/rtsp-simple-server_$${VERSION}_linux_arm64v8.tar.gz --owner=0 --group=0 rtsp-simple-server rtsp-simple-server.yml
RUN rm internal/rpicamera/exe/exe
endef
export DOCKERFILE_RELEASE
release:
echo "$$DOCKERFILE_RELEASE" | DOCKER_BUILDKIT=1 docker build . -f - -t temp
docker run --rm -v $(PWD):/out \
temp sh -c "rm -rf /out/release && cp -r /s/release /out/"
define DOCKERFILE_DOCKERHUB
FROM --platform=linux/amd64 $(BASE_IMAGE) AS build

36
README.md

@ -15,16 +15,17 @@ Features: @@ -15,16 +15,17 @@ Features:
* Publish live streams to the server
* Read live streams from the server
* Act as a proxy and serve streams from other servers or cameras, always or on-demand
* Each stream can have multiple video and audio tracks, encoded with any codec, including H264, H265, VP8, VP9, MPEG2, MP3, AAC, Opus, PCM, JPEG
* Proxy streams from other servers or cameras, always or on-demand
* Each stream can have multiple video and audio tracks, encoded with any RTP-compatible codec, including H264, H265, VP8, VP9, MPEG2, MP3, AAC, Opus, PCM, JPEG
* Streams are automatically converted from a protocol to another. For instance, it's possible to publish a stream with RTSP and read it with HLS
* Serve multiple streams at once in separate paths
* Authenticate users; use internal or external authentication
* Redirect readers to other RTSP servers (load balancing)
* Query and control the server through an HTTP API
* Reload the configuration without disconnecting existing clients (hot reloading)
* Read Prometheus-compatible metrics
* Redirect readers to other RTSP servers (load balancing)
* Run external commands when clients connect, disconnect, read or publish streams
* Reload the configuration without disconnecting existing clients (hot reloading)
* Natively compatible with the Raspberry Pi Camera
* Compatible with Linux, Windows and macOS, does not require any dependency or interpreter, it's a single executable
[![Test](https://github.com/aler9/rtsp-simple-server/workflows/test/badge.svg)](https://github.com/aler9/rtsp-simple-server/actions?query=workflow:test)
@ -361,7 +362,7 @@ The command inserted into `runOnDemand` will start only when a client requests t @@ -361,7 +362,7 @@ The command inserted into `runOnDemand` will start only when a client requests t
#### Linux
Systemd is the service manager used by Ubuntu, Debian and many other Linux distributions, and allows to launch rtsp-simple-server on boot.
Systemd is the service manager used by Ubuntu, Debian and many other Linux distributions, and allows to launch _rtsp-simple-server_ on boot.
Download a release bundle from the [release page](https://github.com/aler9/rtsp-simple-server/releases), unzip it, and move the executable and configuration in the system:
@ -523,26 +524,27 @@ After starting the server, the webcam can be reached on `rtsp://localhost:8554/c @@ -523,26 +524,27 @@ After starting the server, the webcam can be reached on `rtsp://localhost:8554/c
### From a Raspberry Pi Camera
To publish the video stream of a Raspberry Pi Camera to the server, install a couple of dependencies:
_rtsp-simple-server_ natively supports the Raspberry Pi Camera, enabling high-quality and low-latency video streaming from the camera to any user. To make the video stream of a Raspberry Pi Camera available on the server:
1. The server must run on a Raspberry Pi, with Raspberry Pi OS Bullseye or newer as operating system, and must be installed with the standard method (Docker is currently not supported). If you're using the 64-bit version of the operating system, you need to pick the `arm64` variant of the server.
1. _GStreamer_ and _h264parse_:
2. Make sure that the legacy camera stack is disabled. Type:
```
sudo apt install -y gstreamer1.0-tools gstreamer1.0-rtsp gstreamer1.0-plugins-bad
sudo raspi-config
```
2. _gst-rpicamsrc_, by following [instruction here](https://github.com/thaytan/gst-rpicamsrc)
Then go to `Interfacing options`, `enable/disable legacy camera support`, choose `no`. Reboot the system.
Then edit `rtsp-simple-server.yml` and replace everything inside section `paths` with the following content:
3. edit `rtsp-simple-server.yml` and replace everything inside section `paths` with the following content:
```yml
paths:
cam:
runOnInit: gst-launch-1.0 rpicamsrc preview=false bitrate=2000000 keyframe-interval=50 ! video/x-h264,width=1920,height=1080,framerate=25/1 ! h264parse ! rtspclientsink location=rtsp://localhost:$RTSP_PORT/$RTSP_PATH
runOnInitRestart: yes
```
```yml
paths:
cam:
source: rpiCamera
```
After starting the server, the camera is available on `rtsp://localhost:8554/cam`.
After starting the server, the camera can be reached on `rtsp://raspberry-pi:8554/cam` or `http://raspberry-pi:8888/cam`.
### From OBS Studio

8
apidocs/openapi.yaml

@ -199,6 +199,7 @@ components: @@ -199,6 +199,7 @@ components:
- $ref: '#/components/schemas/PathSourceRTSPSource'
- $ref: '#/components/schemas/PathSourceRTMPSource'
- $ref: '#/components/schemas/PathSourceHLSSource'
- $ref: '#/components/schemas/PathSourceRPICameraSource'
sourceReady:
type: boolean
tracks:
@ -275,6 +276,13 @@ components: @@ -275,6 +276,13 @@ components:
type: string
enum: [hlsSource]
PathSourceRPICameraSource:
type: object
properties:
type:
type: string
enum: [rpiCameraSource]
PathReaderRTSPSession:
type: object
properties:

13
internal/conf/env_test.go

@ -8,8 +8,17 @@ import ( @@ -8,8 +8,17 @@ import (
"github.com/stretchr/testify/require"
)
type subStruct struct {
// int
MyParam int
}
type mapEntry struct {
// string
MyValue string
// struct
MyStruct subStruct
}
type testStruct struct {
@ -48,6 +57,9 @@ func TestEnvironment(t *testing.T) { @@ -48,6 +57,9 @@ func TestEnvironment(t *testing.T) {
os.Setenv("MYPREFIX_MYMAP_MYKEY2_MYVALUE", "asd")
defer os.Unsetenv("MYPREFIX_MYMAP_MYKEY2_MYVALUE")
os.Setenv("MYPREFIX_MYMAP_MYKEY2_MYSTRUCT_MYPARAM", "456")
defer os.Unsetenv("MYPREFIX_MYMAP_MYKEY2_MYSTRUCT_MYPARAM")
var s testStruct
err := loadFromEnvironment("MYPREFIX", &s)
require.NoError(t, err)
@ -63,4 +75,5 @@ func TestEnvironment(t *testing.T) { @@ -63,4 +75,5 @@ func TestEnvironment(t *testing.T) {
v, ok := s.MyMap["mykey2"]
require.Equal(t, true, ok)
require.Equal(t, "asd", v.MyValue)
require.Equal(t, 456, v.MyStruct.MyParam)
}

31
internal/conf/path.go

@ -49,6 +49,14 @@ type PathConf struct { @@ -49,6 +49,14 @@ type PathConf struct {
SourceRedirect string `json:"sourceRedirect"`
DisablePublisherOverride bool `json:"disablePublisherOverride"`
Fallback string `json:"fallback"`
RPICameraCamID int `json:"rpiCameraCamID"`
RPICameraWidth int `json:"rpiCameraWidth"`
RPICameraHeight int `json:"rpiCameraHeight"`
RPICameraFPS int `json:"rpiCameraFPS"`
RPICameraIDRPeriod int `json:"rpiCameraIDRPeriod"`
RPICameraBitrate int `json:"rpiCameraBitrate"`
RPICameraProfile string `json:"rpiCameraProfile"`
RPICameraLevel string `json:"rpiCameraLevel"`
// authentication
PublishUser Credential `json:"publishUser"`
@ -165,6 +173,29 @@ func (pconf *PathConf) checkAndFillMissing(conf *Conf, name string) error { @@ -165,6 +173,29 @@ func (pconf *PathConf) checkAndFillMissing(conf *Conf, name string) error {
return fmt.Errorf("'%s' is not a valid RTSP URL", pconf.SourceRedirect)
}
case pconf.Source == "rpiCamera":
if pconf.RPICameraWidth == 0 {
pconf.RPICameraWidth = 1280
}
if pconf.RPICameraHeight == 0 {
pconf.RPICameraHeight = 720
}
if pconf.RPICameraFPS == 0 {
pconf.RPICameraFPS = 30
}
if pconf.RPICameraIDRPeriod == 0 {
pconf.RPICameraIDRPeriod = 60
}
if pconf.RPICameraBitrate == 0 {
pconf.RPICameraBitrate = 1000000
}
if pconf.RPICameraProfile == "" {
pconf.RPICameraProfile = "main"
}
if pconf.RPICameraLevel == "" {
pconf.RPICameraLevel = "4.1"
}
default:
return fmt.Errorf("invalid source: '%s'", pconf.Source)
}

8
internal/core/api.go

@ -115,6 +115,14 @@ func loadConfPathData(ctx *gin.Context) (interface{}, error) { @@ -115,6 +115,14 @@ func loadConfPathData(ctx *gin.Context) (interface{}, error) {
SourceRedirect *string `json:"sourceRedirect"`
DisablePublisherOverride *bool `json:"disablePublisherOverride"`
Fallback *string `json:"fallback"`
RPICameraCamID *int `json:"rpiCameraCamID"`
RPICameraWidth *int `json:"rpiCameraWidth"`
RPICameraHeight *int `json:"rpiCameraHeight"`
RPICameraFPS *int `json:"rpiCameraFPS"`
RPICameraIDRPeriod *int `json:"rpiCameraIDRPeriod"`
RPICameraBitrate *int `json:"rpiCameraBitrate"`
RPICameraProfile *string `json:"rpiCameraProfile"`
RPICameraLevel *string `json:"rpiCameraLevel"`
// authentication
PublishUser *conf.Credential `json:"publishUser"`

2
internal/core/hls_source.go

@ -72,7 +72,7 @@ func (s *hlsSource) run(ctx context.Context) error { @@ -72,7 +72,7 @@ func (s *hlsSource) run(ctx context.Context) error {
return res.err
}
s.Log(logger.Info, "proxying %s", sourceTrackInfo(tracks))
s.Log(logger.Info, "ready: %s", sourceTrackInfo(tracks))
stream = res.stream
return nil

8
internal/core/path.go

@ -335,7 +335,8 @@ func (pa *path) hasStaticSource() bool { @@ -335,7 +335,8 @@ func (pa *path) hasStaticSource() bool {
strings.HasPrefix(pa.conf.Source, "rtsps://") ||
strings.HasPrefix(pa.conf.Source, "rtmp://") ||
strings.HasPrefix(pa.conf.Source, "http://") ||
strings.HasPrefix(pa.conf.Source, "https://")
strings.HasPrefix(pa.conf.Source, "https://") ||
pa.conf.Source == "rpiCamera"
}
func (pa *path) hasOnDemandStaticSource() bool {
@ -353,10 +354,7 @@ func (pa *path) run() { @@ -353,10 +354,7 @@ func (pa *path) run() {
pa.source = &sourceRedirect{}
} else if pa.hasStaticSource() {
pa.source = newSourceStatic(
pa.conf.Source,
pa.conf.SourceProtocol,
pa.conf.SourceAnyPortEnable,
pa.conf.SourceFingerprint,
pa.conf,
pa.readTimeout,
pa.writeTimeout,
pa.readBufferCount,

94
internal/core/rpicamera_source.go

@ -0,0 +1,94 @@ @@ -0,0 +1,94 @@
package core
import (
"context"
"time"
"github.com/aler9/gortsplib"
"github.com/aler9/gortsplib/pkg/h264"
"github.com/aler9/gortsplib/pkg/rtph264"
"github.com/aler9/rtsp-simple-server/internal/logger"
"github.com/aler9/rtsp-simple-server/internal/rpicamera"
)
// rpiCameraSourceParent is implemented by the entity that owns a
// rpiCameraSource (the path); it receives log messages and
// ready / not-ready notifications.
type rpiCameraSourceParent interface {
	log(logger.Level, string, ...interface{})
	sourceStaticImplSetReady(req pathSourceStaticSetReadyReq) pathSourceStaticSetReadyRes
	sourceStaticImplSetNotReady(req pathSourceStaticSetNotReadyReq)
}

// rpiCameraSource is a static source that obtains H264 video from the
// Raspberry Pi Camera.
type rpiCameraSource struct {
	// camera parameters (resolution, FPS, bitrate, profile, ...)
	params rpicamera.Params
	// parent path
	parent rpiCameraSourceParent
}
// newRPICameraSource allocates a rpiCameraSource with the given camera
// parameters and parent.
func newRPICameraSource(
	params rpicamera.Params,
	parent rpiCameraSourceParent,
) *rpiCameraSource {
	s := &rpiCameraSource{
		params: params,
		parent: parent,
	}
	return s
}
// Log forwards a log message to the parent, tagging it with the
// source name.
func (s *rpiCameraSource) Log(level logger.Level, format string, args ...interface{}) {
	s.parent.log(level, "[rpicamera source] "+format, args...)
}
// run implements sourceStaticImpl.
// It opens the Raspberry Pi Camera through the rpicamera package and
// pushes every group of H264 NALUs it produces into the path's stream,
// until ctx is canceled.
func (s *rpiCameraSource) run(ctx context.Context) error {
	// single H264 track with the dynamic RTP payload type 96.
	track := &gortsplib.TrackH264{PayloadType: 96}
	tracks := gortsplib.Tracks{track}

	// NOTE(review): enc is initialized but not used afterwards in this
	// function — confirm whether it can be removed.
	enc := &rtph264.Encoder{PayloadType: 96}
	enc.Init()

	var stream *stream
	var start time.Time

	// onData is invoked by the camera driver for each encoded frame.
	onData := func(nalus [][]byte) {
		// declare the path ready on the first frame received.
		if stream == nil {
			res := s.parent.sourceStaticImplSetReady(pathSourceStaticSetReadyReq{
				tracks: tracks,
			})
			if res.err != nil {
				return
			}

			s.Log(logger.Info, "ready: %s", sourceTrackInfo(tracks))
			stream = res.stream
			start = time.Now()
		}

		// PTS is the wall-clock time elapsed since the first frame.
		pts := time.Since(start)

		stream.writeData(&data{
			trackID:      0,
			ptsEqualsDTS: h264.IDRPresent(nalus),
			pts:          pts,
			h264NALUs:    nalus,
		})
	}

	cam, err := rpicamera.New(s.params, onData)
	if err != nil {
		return err
	}
	defer cam.Close()

	// on teardown, mark the path not-ready only if it became ready.
	defer func() {
		if stream != nil {
			s.parent.sourceStaticImplSetNotReady(pathSourceStaticSetNotReadyReq{})
		}
	}()

	<-ctx.Done()

	return nil
}
// apiSourceDescribe implements sourceStaticImpl.
// It returns the description of this source exposed by the HTTP API.
func (*rpiCameraSource) apiSourceDescribe() interface{} {
	v := struct {
		Type string `json:"type"`
	}{Type: "rpiCameraSource"}
	return v
}

2
internal/core/rtmp_source.go

@ -113,7 +113,7 @@ func (s *rtmpSource) run(ctx context.Context) error { @@ -113,7 +113,7 @@ func (s *rtmpSource) run(ctx context.Context) error {
return res.err
}
s.Log(logger.Info, "proxying %s", sourceTrackInfo(tracks))
s.Log(logger.Info, "ready: %s", sourceTrackInfo(tracks))
defer func() {
s.parent.sourceStaticImplSetNotReady(pathSourceStaticSetNotReadyReq{})

2
internal/core/rtsp_source.go

@ -133,7 +133,7 @@ func (s *rtspSource) run(ctx context.Context) error { @@ -133,7 +133,7 @@ func (s *rtspSource) run(ctx context.Context) error {
return res.err
}
s.Log(logger.Info, "proxying %s", sourceTrackInfo(tracks))
s.Log(logger.Info, "ready: %s", sourceTrackInfo(tracks))
defer func() {
s.parent.sourceStaticImplSetNotReady(pathSourceStaticSetNotReadyReq{})

70
internal/core/source_static.go

@ -8,6 +8,7 @@ import ( @@ -8,6 +8,7 @@ import (
"github.com/aler9/rtsp-simple-server/internal/conf"
"github.com/aler9/rtsp-simple-server/internal/logger"
"github.com/aler9/rtsp-simple-server/internal/rpicamera"
)
const (
@ -28,14 +29,7 @@ type sourceStaticParent interface { @@ -28,14 +29,7 @@ type sourceStaticParent interface {
// sourceStatic is a static source.
type sourceStatic struct {
ur string
protocol conf.SourceProtocol
anyPortEnable bool
fingerprint string
readTimeout conf.StringDuration
writeTimeout conf.StringDuration
readBufferCount int
parent sourceStaticParent
parent sourceStaticParent
ctx context.Context
ctxCancel func()
@ -48,53 +42,57 @@ type sourceStatic struct { @@ -48,53 +42,57 @@ type sourceStatic struct {
}
func newSourceStatic(
ur string,
protocol conf.SourceProtocol,
anyPortEnable bool,
fingerprint string,
conf *conf.PathConf,
readTimeout conf.StringDuration,
writeTimeout conf.StringDuration,
readBufferCount int,
parent sourceStaticParent,
) *sourceStatic {
s := &sourceStatic{
ur: ur,
protocol: protocol,
anyPortEnable: anyPortEnable,
fingerprint: fingerprint,
readTimeout: readTimeout,
writeTimeout: writeTimeout,
readBufferCount: readBufferCount,
parent: parent,
chSourceStaticImplSetReady: make(chan pathSourceStaticSetReadyReq),
chSourceStaticImplSetNotReady: make(chan pathSourceStaticSetNotReadyReq),
}
switch {
case strings.HasPrefix(s.ur, "rtsp://") ||
strings.HasPrefix(s.ur, "rtsps://"):
case strings.HasPrefix(conf.Source, "rtsp://") ||
strings.HasPrefix(conf.Source, "rtsps://"):
s.impl = newRTSPSource(
s.ur,
s.protocol,
s.anyPortEnable,
s.fingerprint,
s.readTimeout,
s.writeTimeout,
s.readBufferCount,
conf.Source,
conf.SourceProtocol,
conf.SourceAnyPortEnable,
conf.SourceFingerprint,
readTimeout,
writeTimeout,
readBufferCount,
s)
case strings.HasPrefix(s.ur, "rtmp://"):
case strings.HasPrefix(conf.Source, "rtmp://"):
s.impl = newRTMPSource(
s.ur,
s.readTimeout,
s.writeTimeout,
conf.Source,
readTimeout,
writeTimeout,
s)
case strings.HasPrefix(s.ur, "http://") ||
strings.HasPrefix(s.ur, "https://"):
case strings.HasPrefix(conf.Source, "http://") ||
strings.HasPrefix(conf.Source, "https://"):
s.impl = newHLSSource(
s.ur,
s.fingerprint,
conf.Source,
conf.SourceFingerprint,
s)
case conf.Source == "rpiCamera":
s.impl = newRPICameraSource(
rpicamera.Params{
CameraID: conf.RPICameraCamID,
Width: conf.RPICameraWidth,
Height: conf.RPICameraHeight,
FPS: conf.RPICameraFPS,
IDRPeriod: conf.RPICameraIDRPeriod,
Bitrate: conf.RPICameraBitrate,
Profile: conf.RPICameraProfile,
Level: conf.RPICameraLevel,
},
s)
}

85
internal/rpicamera/embeddedexe.go

@ -0,0 +1,85 @@ @@ -0,0 +1,85 @@
//go:build rpicamera
// +build rpicamera
package rpicamera
import (
"fmt"
"os"
"os/exec"
"runtime"
"strconv"
"time"
)
const (
tempPathPrefix = "/dev/shm/rtspss-embeddedexe-"
)
// getKernelArch returns the machine hardware name reported by the
// kernel (e.g. "armv7l", "aarch64"), obtained by running `uname -m`.
func getKernelArch() (string, error) {
	byts, err := exec.Command("uname", "-m").Output()
	if err != nil {
		return "", err
	}

	// uname terminates its output with a newline; strip it only when
	// present instead of blindly dropping the last byte, which would
	// panic on empty output.
	out := string(byts)
	if n := len(out); n > 0 && out[n-1] == '\n' {
		out = out[:n-1]
	}
	return out, nil
}
// checkArch verifies that the embedded 32-bit executable is not being
// launched on a 64-bit kernel, where it cannot run.
func checkArch() error {
	// only the 32-bit ARM build can be mismatched with the kernel;
	// every other architecture is fine as-is.
	if runtime.GOARCH == "arm" {
		kernelArch, err := getKernelArch()
		switch {
		case err != nil:
			return err
		case kernelArch == "aarch64":
			return fmt.Errorf("OS is 64-bit, you need the arm64 server version")
		}
	}
	return nil
}
// embeddedExe is an executable embedded into the server binary,
// extracted and run as a child process.
type embeddedExe struct {
	cmd *exec.Cmd
}

// newEmbeddedExe dumps content into a temporary file under /dev/shm,
// launches it with the given environment, and deletes the file as soon
// as the process has started.
func newEmbeddedExe(content []byte, env []string) (*embeddedExe, error) {
	if err := checkArch(); err != nil {
		return nil, err
	}

	// unique path, so concurrent instances never collide.
	exePath := tempPathPrefix + strconv.FormatInt(time.Now().UnixNano(), 10)

	if err := os.WriteFile(exePath, content, 0o755); err != nil {
		return nil, err
	}

	cmd := exec.Command(exePath)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	cmd.Env = env

	startErr := cmd.Start()

	// the kernel keeps the file accessible to the running process even
	// after removal, so it can be deleted immediately.
	os.Remove(exePath)

	if startErr != nil {
		return nil, startErr
	}

	return &embeddedExe{cmd: cmd}, nil
}
// close terminates the embedded process by killing it and then reaping
// its exit status, so that no zombie is left behind.
func (e *embeddedExe) close() {
	e.cmd.Process.Kill()
	e.cmd.Wait()
}

40
internal/rpicamera/exe/Makefile

@ -0,0 +1,40 @@ @@ -0,0 +1,40 @@
CC = gcc
CXX = g++
CFLAGS = \
-Ofast \
-Werror \
-Wall \
-Wextra \
-Wno-unused-parameter
CXXFLAGS = \
-Ofast \
-Werror \
-Wall \
-Wextra \
-Wno-unused-parameter \
-std=c++17 \
$$(pkg-config --cflags libcamera)
LDFLAGS = \
-s \
-pthread \
$$(pkg-config --libs libcamera)
OBJS = \
camera.o \
encoder.o \
main.o \
parameters.o
all: exe
%.o: %.c
$(CC) $(CFLAGS) -c $< -o $@
%.o: %.cpp
$(CXX) $(CXXFLAGS) -c $< -o $@
exe: $(OBJS)
$(CXX) $(LDFLAGS) -o $@ $^

209
internal/rpicamera/exe/camera.cpp

@ -0,0 +1,209 @@ @@ -0,0 +1,209 @@
#include <stdio.h>
#include <stdarg.h>
#include <cstring>
#include <sys/mman.h>
#include <iostream>
#include <libcamera/camera_manager.h>
#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/framebuffer_allocator.h>
#include <linux/videodev2.h>
#include "parameters.h"
#include "camera.h"
using libcamera::CameraManager;
using libcamera::CameraConfiguration;
using libcamera::Camera;
using libcamera::StreamRoles;
using libcamera::StreamRole;
using libcamera::StreamConfiguration;
using libcamera::Stream;
using libcamera::ControlList;
using libcamera::FrameBufferAllocator;
using libcamera::FrameBuffer;
using libcamera::Request;
using libcamera::Span;
namespace controls = libcamera::controls;
namespace formats = libcamera::formats;
// buffer holding the last error message. static so that it does not
// collide at link time with the identically-named buffer in encoder.c.
static char errbuf[256];

// set_error stores a printf-style formatted message into errbuf.
static void set_error(const char *format, ...) {
	va_list args;
	va_start(args, format);
	vsnprintf(errbuf, sizeof(errbuf), format, args);
	va_end(args); // required after va_start by the C standard
}

// camera_get_error returns a description of the last error.
const char *camera_get_error() {
	return errbuf;
}
// per-instance state behind the opaque camera_t handle.
struct CameraPriv {
	parameters_t *params;
	camera_frame_cb frame_cb;
	std::unique_ptr<CameraManager> camera_manager;
	std::shared_ptr<Camera> camera;
	std::unique_ptr<FrameBufferAllocator> allocator;
	std::vector<std::unique_ptr<Request>> requests;
};

// maps a libcamera color space onto the corresponding V4L2 constant;
// anything other than Rec709 falls back to SMPTE170M.
static int get_v4l2_colorspace(std::optional<libcamera::ColorSpace> const &cs) {
	if (cs == libcamera::ColorSpace::Rec709) {
		return V4L2_COLORSPACE_REC709;
	}
	return V4L2_COLORSPACE_SMPTE170M;
}
// camera_create initializes the selected camera: it starts the camera
// manager, acquires the camera, negotiates a YUV420 video-recording
// stream, allocates frame buffers and pre-builds one capture request
// per buffer. On failure it fills errbuf and returns false.
// NOTE(review): error paths after acquire() do not release the camera —
// confirm whether cleanup is intentionally left to process exit.
bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
	std::unique_ptr<CameraPriv> camp = std::make_unique<CameraPriv>();

	camp->camera_manager = std::make_unique<CameraManager>();
	int ret = camp->camera_manager->start();
	if (ret != 0) {
		set_error("CameraManager.start() failed");
		return false;
	}

	// enumerate cameras, excluding USB webcams: only native CSI
	// cameras are handled by this executable.
	std::vector<std::shared_ptr<libcamera::Camera>> cameras = camp->camera_manager->cameras();
	auto rem = std::remove_if(cameras.begin(), cameras.end(),
		[](auto &cam) { return cam->id().find("/usb") != std::string::npos; });
	cameras.erase(rem, cameras.end());
	if (params->camera_id >= cameras.size()){
		set_error("selected camera is not available");
		return false;
	}

	camp->camera = camp->camera_manager->get(cameras[params->camera_id]->id());
	if (camp->camera == NULL) {
		set_error("CameraManager.get() failed");
		return false;
	}

	// get exclusive access to the camera.
	ret = camp->camera->acquire();
	if (ret != 0) {
		set_error("Camera.acquire() failed");
		return false;
	}

	StreamRoles stream_roles = { StreamRole::VideoRecording };
	std::unique_ptr<CameraConfiguration> conf = camp->camera->generateConfiguration(stream_roles);
	if (conf == NULL) {
		set_error("Camera.generateConfiguration() failed");
		return false;
	}

	// request the resolution and buffer count from the parameters,
	// with planar YUV420 as pixel format (what the encoder consumes).
	StreamConfiguration &stream_conf = conf->at(0);
	stream_conf.pixelFormat = formats::YUV420;
	stream_conf.bufferCount = params->buffer_count;
	stream_conf.size.width = params->width;
	stream_conf.size.height = params->height;
	// HD resolutions conventionally use Rec709, SD ones SMPTE170M.
	if (params->width >= 1280 || params->height >= 720) {
		stream_conf.colorSpace = libcamera::ColorSpace::Rec709;
	} else {
		stream_conf.colorSpace = libcamera::ColorSpace::Smpte170m;
	}

	// let libcamera adjust the configuration to what the hardware
	// supports; only a fully invalid configuration is fatal.
	CameraConfiguration::Status vstatus = conf->validate();
	if (vstatus == CameraConfiguration::Invalid) {
		set_error("StreamConfiguration.validate() failed");
		return false;
	}

	int res = camp->camera->configure(conf.get());
	if (res != 0) {
		set_error("Camera.configure() failed");
		return false;
	}

	Stream *stream = stream_conf.stream();

	camp->allocator = std::make_unique<FrameBufferAllocator>(camp->camera);
	res = camp->allocator->allocate(stream);
	if (res < 0) {
		set_error("allocate() failed");
		return false;
	}

	// create one request per allocated buffer; the cookie stores the
	// CameraPriv pointer so the completion callback can find it.
	for (const std::unique_ptr<FrameBuffer> &buffer : camp->allocator->buffers(stream)) {
		std::unique_ptr<Request> request = camp->camera->createRequest((uint64_t)camp.get());
		if (request == NULL) {
			set_error("createRequest() failed");
			return false;
		}

		int res = request->addBuffer(stream, buffer.get());
		if (res != 0) {
			set_error("addBuffer() failed");
			return false;
		}

		camp->requests.push_back(std::move(request));
	}

	camp->params = params;
	camp->frame_cb = frame_cb;
	*cam = camp.release();

	return true;
}
// on_request_complete is invoked by libcamera when a capture request
// finishes; it forwards the filled buffer to the frame callback and
// immediately re-queues the request.
static void on_request_complete(Request *request) {
	// requests are cancelled during shutdown; nothing to do.
	if (request->status() == Request::RequestCancelled) {
		return;
	}

	// the cookie was set to the CameraPriv pointer in camera_create().
	CameraPriv *camp = (CameraPriv *)request->cookie();

	FrameBuffer *buffer = request->buffers().begin()->second;

	// total frame size across all planes.
	int size = 0;
	for (const FrameBuffer::Plane &plane : buffer->planes()) {
		size += plane.length;
	}

	// timestamp is converted from nanoseconds to microseconds.
	camp->frame_cb(buffer->planes()[0].fd.get(), size, buffer->metadata().timestamp / 1000);

	request->reuse(Request::ReuseFlag::ReuseBuffers);
	camp->camera->queueRequest(request);
}
// camera_get_stride returns the line stride chosen by the driver for
// the negotiated stream configuration.
int camera_get_stride(camera_t *cam) {
	CameraPriv *camp = (CameraPriv *)cam;
	return (*camp->camera->streams().begin())->configuration().stride;
}

// camera_get_colorspace returns the negotiated color space, converted
// to a V4L2 constant so it can be handed to the encoder.
int camera_get_colorspace(camera_t *cam) {
	CameraPriv *camp = (CameraPriv *)cam;
	return get_v4l2_colorspace((*camp->camera->streams().begin())->configuration().colorSpace);
}
// camera_start fixes the frame rate, starts the camera and queues all
// pre-built requests. returns false on error.
bool camera_start(camera_t *cam) {
	CameraPriv *camp = (CameraPriv *)cam;

	ControlList ctrls = ControlList(controls::controls);

	// setting both frame-duration limits (in microseconds) to the same
	// value yields a constant frame rate.
	int64_t frame_time = 1000000 / camp->params->fps;
	ctrls.set(controls::FrameDurationLimits, Span<const int64_t, 2>({ frame_time, frame_time }));

	int res = camp->camera->start(&ctrls);
	if (res != 0) {
		set_error("Camera.start() failed");
		return false;
	}

	// connect the completion callback before queueing any request.
	camp->camera->requestCompleted.connect(on_request_complete);

	for (std::unique_ptr<Request> &request : camp->requests) {
		int res = camp->camera->queueRequest(request.get());
		if (res != 0) {
			set_error("Camera.queueRequest() failed");
			return false;
		}
	}

	return true;
}

17
internal/rpicamera/exe/camera.h

@ -0,0 +1,17 @@ @@ -0,0 +1,17 @@
// opaque handle to a camera instance.
typedef void camera_t;

// callback invoked for each captured frame; receives the DMA buffer
// file descriptor, the total frame size in bytes and the frame
// timestamp (microseconds, derived from the buffer metadata).
typedef void (*camera_frame_cb)(int buffer_fd, uint64_t size, uint64_t timestamp);

#ifdef __cplusplus
extern "C" {
#endif

// returns a description of the last error.
const char *camera_get_error();

// initializes the camera with the given parameters; captured frames
// are delivered to frame_cb. returns false on error.
bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam);

// returns the line stride of the negotiated stream configuration.
int camera_get_stride(camera_t *cam);

// returns the negotiated color space as a V4L2 constant.
int camera_get_colorspace(camera_t *cam);

// starts capturing. returns false on error.
bool camera_start(camera_t *cam);

#ifdef __cplusplus
}
#endif

315
internal/rpicamera/exe/encoder.c

@ -0,0 +1,315 @@ @@ -0,0 +1,315 @@
#include <stdbool.h>
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <errno.h>
#include <poll.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include "parameters.h"
#include "encoder.h"
// buffer holding the last error message. static so that it does not
// collide at link time with the identically-named buffer in camera.cpp.
static char errbuf[256];

// set_error stores a printf-style formatted message into errbuf.
static void set_error(const char *format, ...) {
	va_list args;
	va_start(args, format);
	vsnprintf(errbuf, sizeof(errbuf), format, args);
	va_end(args); // required after va_start by the C standard
}

// encoder_get_error returns a description of the last error.
const char *encoder_get_error() {
	return errbuf;
}
// internal state behind the opaque encoder_t handle.
typedef struct {
	parameters_t *params;        // encoding parameters
	int fd;                      // V4L2 encoder device (/dev/video11)
	void **capture_buffers;      // mmapped capture-plane buffers, indexed by buffer index
	int cur_buffer;              // presumably the next output buffer to fill — confirm in encoder_create/encode
	encoder_output_cb output_cb; // receives each encoded buffer
	pthread_t output_thread;     // thread draining the capture queue (see output_thread)
} encoder_priv_t;
// output_thread runs forever, draining the V4L2 encoder:
// it polls the device, dequeues the consumed raw (output-plane) buffer,
// dequeues the encoded (capture-plane) buffer, hands the encoded bytes
// to the output callback and re-queues the capture buffer.
// fatal V4L2 errors terminate the whole process.
static void *output_thread(void *userdata) {
	encoder_priv_t *encp = (encoder_priv_t *)userdata;

	while (true) {
		// 200 ms timeout so the loop never blocks indefinitely.
		struct pollfd p = { encp->fd, POLLIN, 0 };
		int res = poll(&p, 1, 200);
		if (res == -1) {
			fprintf(stderr, "output_thread(): poll() failed\n");
			exit(1);
		}

		if (p.revents & POLLIN) {
			// dequeue the raw frame the encoder has finished reading
			// (output plane, DMABUF memory coming from the camera).
			struct v4l2_buffer buf = {0};
			struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
			buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
			buf.memory = V4L2_MEMORY_DMABUF;
			buf.length = 1;
			buf.m.planes = planes;
			int res = ioctl(encp->fd, VIDIOC_DQBUF, &buf);
			if (res != 0) {
				fprintf(stderr, "output_thread(): ioctl() failed\n");
				exit(1);
			}

			// dequeue the corresponding encoded buffer (capture plane,
			// MMAP memory owned by this process).
			memset(&buf, 0, sizeof(buf));
			memset(planes, 0, sizeof(planes));
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
			buf.memory = V4L2_MEMORY_MMAP;
			buf.length = 1;
			buf.m.planes = planes;
			res = ioctl(encp->fd, VIDIOC_DQBUF, &buf);
			if (res == 0) {
				// deliver the encoded bytes to the callback.
				const uint8_t *bufmem = (const uint8_t *)encp->capture_buffers[buf.index];
				int bufsize = buf.m.planes[0].bytesused;
				encp->output_cb(bufmem, bufsize);

				int index = buf.index;
				int length = buf.m.planes[0].length;

				// re-queue the capture buffer so the encoder can reuse
				// it. NOTE(review): this inner buf/planes deliberately
				// shadows the outer pair to start from a clean state —
				// confirm the shadowing is intentional.
				struct v4l2_buffer buf = {0};
				struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
				buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
				buf.memory = V4L2_MEMORY_MMAP;
				buf.index = index;
				buf.length = 1;
				buf.m.planes = planes;
				buf.m.planes[0].bytesused = 0;
				buf.m.planes[0].length = length;
				int res = ioctl(encp->fd, VIDIOC_QBUF, &buf);
				if (res < 0) {
					fprintf(stderr, "output_thread(): ioctl() failed\n");
					exit(1);
				}
			}
		}
	}

	return NULL;
}
// encoder_create() opens and configures the V4L2 hardware H264 encoder
// (/dev/video11), allocates and maps its buffers, starts both streaming
// queues and spawns output_thread().
//
// On failure it stores a message retrievable with encoder_get_error(),
// releases everything acquired so far and returns false. The original code
// leaked *enc on every error path; cleanup is now centralized in a single
// goto-based tail.
bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_output_cb output_cb, encoder_t **enc) {
    *enc = malloc(sizeof(encoder_priv_t));
    encoder_priv_t *encp = (encoder_priv_t *)(*enc);
    encp->capture_buffers = NULL; // lets the cleanup path call free() unconditionally

    encp->fd = open("/dev/video11", O_RDWR, 0);
    if (encp->fd < 0) {
        set_error("unable to open device");
        goto failed_nofd;
    }

    // encoder controls: bitrate, profile, level, IDR period, no SPS/PPS repetition
    struct v4l2_control ctrl = {0};
    ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
    ctrl.value = params->bitrate;
    int res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
    if (res != 0) {
        set_error("unable to set bitrate");
        goto failed;
    }

    ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
    ctrl.value = params->profile;
    res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
    if (res != 0) {
        set_error("unable to set profile");
        goto failed;
    }

    ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
    ctrl.value = params->level;
    res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
    if (res != 0) {
        set_error("unable to set level");
        goto failed;
    }

    ctrl.id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD;
    ctrl.value = params->idr_period;
    res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
    if (res != 0) {
        set_error("unable to set IDR period");
        goto failed;
    }

    ctrl.id = V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER;
    ctrl.value = 0;
    res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
    if (res != 0) {
        set_error("unable to set REPEAT_SEQ_HEADER");
        goto failed;
    }

    // output (raw input) format: planar YUV420 with the camera's stride
    struct v4l2_format fmt = {0};
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    fmt.fmt.pix_mp.width = params->width;
    fmt.fmt.pix_mp.height = params->height;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
    fmt.fmt.pix_mp.plane_fmt[0].bytesperline = stride;
    fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
    fmt.fmt.pix_mp.colorspace = colorspace;
    fmt.fmt.pix_mp.num_planes = 1;
    res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
    if (res != 0) {
        set_error("unable to set output format");
        goto failed;
    }

    // capture (encoded output) format: H264, 512KiB per buffer
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.width = params->width;
    fmt.fmt.pix_mp.height = params->height;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
    fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
    fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_DEFAULT;
    fmt.fmt.pix_mp.num_planes = 1;
    fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 0;
    fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 512 << 10;
    res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
    if (res != 0) {
        set_error("unable to set capture format");
        goto failed;
    }

    struct v4l2_streamparm parm = {0};
    parm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    parm.parm.output.timeperframe.numerator = 1;
    parm.parm.output.timeperframe.denominator = params->fps;
    res = ioctl(encp->fd, VIDIOC_S_PARM, &parm);
    if (res != 0) {
        set_error("unable to set fps");
        goto failed;
    }

    // input buffers are DMABUFs provided per-frame by the camera
    struct v4l2_requestbuffers reqbufs = {0};
    reqbufs.count = params->buffer_count;
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_DMABUF;
    res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
    if (res != 0) {
        set_error("unable to set output buffers");
        goto failed;
    }

    // encoded-output buffers are allocated by the driver and mmap()ed here
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.count = params->capture_buffer_count;
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
    if (res != 0) {
        set_error("unable to set capture buffers");
        goto failed;
    }

    encp->capture_buffers = malloc(sizeof(void *) * reqbufs.count);

    for (unsigned int i = 0; i < reqbufs.count; i++) {
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        struct v4l2_buffer buffer = {0};
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;
        buffer.length = 1;
        buffer.m.planes = planes;
        int res = ioctl(encp->fd, VIDIOC_QUERYBUF, &buffer);
        if (res != 0) {
            set_error("unable to query buffer");
            goto failed;
        }

        encp->capture_buffers[i] = mmap(
            0,
            buffer.m.planes[0].length,
            PROT_READ | PROT_WRITE, MAP_SHARED,
            encp->fd,
            buffer.m.planes[0].m.mem_offset);
        if (encp->capture_buffers[i] == MAP_FAILED) {
            set_error("mmap() failed");
            goto failed;
        }

        res = ioctl(encp->fd, VIDIOC_QBUF, &buffer);
        if (res != 0) {
            set_error("ioctl() failed");
            goto failed;
        }
    }

    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    res = ioctl(encp->fd, VIDIOC_STREAMON, &type);
    if (res != 0) {
        set_error("unable to activate output stream");
        goto failed;
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    res = ioctl(encp->fd, VIDIOC_STREAMON, &type);
    if (res != 0) {
        set_error("unable to activate capture stream");
        goto failed;
    }

    encp->params = params;
    encp->cur_buffer = 0;
    encp->output_cb = output_cb;
    // NOTE(review): pthread_create() result is not checked, as in the original.
    pthread_create(&encp->output_thread, NULL, output_thread, encp);

    return true;

failed:
    // NOTE(review): buffers already mmap()ed are not munmap()ed here,
    // matching the original cleanup behavior.
    free(encp->capture_buffers);
    close(encp->fd);

failed_nofd:
    free(*enc);
    *enc = NULL;
    return false;
}
// encoder_encode() queues a raw DMABUF frame on the encoder's output queue.
// Queue slots are cycled round-robin over params->buffer_count entries; the
// timestamp is split into the seconds/microseconds pair V4L2 expects.
void encoder_encode(encoder_t *enc, int buffer_fd, size_t size, int64_t timestamp_us) {
    encoder_priv_t *encp = (encoder_priv_t *)enc;

    int slot = encp->cur_buffer;
    encp->cur_buffer = (encp->cur_buffer + 1) % encp->params->buffer_count;

    struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
    struct v4l2_buffer buf = {0};
    buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    buf.memory = V4L2_MEMORY_DMABUF;
    buf.index = slot;
    buf.field = V4L2_FIELD_NONE;
    buf.length = 1;
    buf.timestamp.tv_sec = timestamp_us / 1000000;
    buf.timestamp.tv_usec = timestamp_us % 1000000;
    buf.m.planes = planes;
    buf.m.planes[0].m.fd = buffer_fd;
    buf.m.planes[0].bytesused = size;
    buf.m.planes[0].length = size;

    if (ioctl(encp->fd, VIDIOC_QBUF, &buf) != 0) {
        fprintf(stderr, "encoder_encode(): ioctl() failed\n");
        exit(1);
    }
}

7
internal/rpicamera/exe/encoder.h

@ -0,0 +1,7 @@ @@ -0,0 +1,7 @@
#ifndef ENCODER_H
#define ENCODER_H

// NOTE: this header relies on <stdint.h>, <stdbool.h>, <stddef.h> and
// "parameters.h" having been included first by the translation unit.

// Opaque handle to the V4L2 hardware H264 encoder.
typedef void encoder_t;

// Callback invoked from the encoder output thread for every encoded buffer.
typedef void (*encoder_output_cb)(const uint8_t *buf, uint64_t size);

// Returns the message describing the last encoder failure.
const char *encoder_get_error();

// Initializes the encoder; returns false on error (see encoder_get_error()).
bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_output_cb output_cb, encoder_t **enc);

// Queues a raw DMABUF frame for encoding.
void encoder_encode(encoder_t *enc, int buffer_fd, size_t size, int64_t timestamp_us);

#endif

117
internal/rpicamera/exe/main.c

@ -0,0 +1,117 @@ @@ -0,0 +1,117 @@
#include <stdio.h>
#include <stdbool.h>
#include <stdarg.h>
#include <stdint.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <signal.h>
#include <string.h>
#include <pthread.h>
#include "parameters.h"
#include "camera.h"
#include "encoder.h"
int pipe_fd;                // write end of the pipe to the Go parent (from the PIPE_FD env var)
pthread_mutex_t pipe_mutex; // serializes pipe writes; also delays encoder output until the 'ready' handshake
parameters_t params;        // settings loaded from environment variables
camera_t *cam;              // libcamera wrapper (see camera.h)
encoder_t *enc;             // V4L2 H264 encoder (see encoder.h)
// pipe_write_error() sends an error message to the parent process.
// Wire format: 4-byte native-endian length, then 'e' followed by the text.
// NOTE(review): write() return values are ignored, as everywhere else in
// this file — the parent treats a broken pipe as process death anyway.
static void pipe_write_error(int fd, const char *format, ...) {
    char buf[256];
    buf[0] = 'e';

    va_list args;
    va_start(args, format);
    vsnprintf(&buf[1], 255, format, args);
    va_end(args); // required: va_start without a matching va_end is undefined behavior

    int n = strlen(buf);
    write(fd, &n, 4);
    write(fd, buf, n);
}
// pipe_write_ready() notifies the parent that initialization succeeded.
// Wire format: 4-byte native-endian length (always 1), then the byte 'r'.
static void pipe_write_ready(int fd) {
    int size = 1;
    char msg = 'r';
    write(fd, &size, 4);
    write(fd, &msg, size);
}
// pipe_write_buf() sends an encoded buffer to the parent.
// Wire format: 4-byte native-endian length (payload + 1), then 'b',
// then the payload itself.
static void pipe_write_buf(int fd, const uint8_t *buf, int n) {
    char head[] = {'b'};
    int total = n + 1;
    write(fd, &total, 4);
    write(fd, head, 1);
    write(fd, buf, n);
}
// on_frame() is the camera callback: it forwards each captured DMABUF
// frame straight to the hardware encoder.
static void on_frame(int buffer_fd, uint64_t size, uint64_t timestamp) {
    encoder_encode(enc, buffer_fd, size, timestamp);
}
// on_encoder_output() is the encoder callback, called from the encoder's
// output thread. pipe_mutex both serializes pipe writes and prevents any
// encoded buffer from being sent before main() writes the 'ready' message.
static void on_encoder_output(const uint8_t *buf, uint64_t size) {
    pthread_mutex_lock(&pipe_mutex);
    pipe_write_buf(pipe_fd, buf, size);
    pthread_mutex_unlock(&pipe_mutex);
}
// init_siglistener() builds the signal set that main() passes to sigwait().
// NOTE(review): SIGKILL cannot be blocked or caught, so sigwait() on this
// set never returns normally; the effect is that main() blocks until the
// parent kills the process — presumably intentional, but worth confirming.
static bool init_siglistener(sigset_t *set) {
    sigemptyset(set);
    int res = sigaddset(set, SIGKILL);
    if (res == -1) {
        return false;
    }
    return true;
}
// Entry point of the helper process spawned by the Go side: it sets up the
// camera and the encoder, reports the outcome to the parent through the
// pipe, then blocks until killed.
int main() {
    // the parent passes the pipe's write end via an env variable
    // (NOTE(review): getenv() results are assumed non-NULL; the parent
    // always sets them — confirm against internal/rpicamera/rpicamera.go)
    pipe_fd = atoi(getenv("PIPE_FD"));

    // hold the pipe lock until the handshake is written, so encoder output
    // produced during startup cannot reach the pipe first
    pthread_mutex_init(&pipe_mutex, NULL);
    pthread_mutex_lock(&pipe_mutex);

    parameters_load(&params);

    bool ok = camera_create(
        &params,
        on_frame,
        &cam);
    if (!ok) {
        pipe_write_error(pipe_fd, "camera_create(): %s", camera_get_error());
        return 5;
    }

    ok = encoder_create(
        &params,
        camera_get_stride(cam),
        camera_get_colorspace(cam),
        on_encoder_output,
        &enc);
    if (!ok) {
        pipe_write_error(pipe_fd, "encoder_create(): %s", encoder_get_error());
        return 5;
    }

    ok = camera_start(cam);
    if (!ok) {
        pipe_write_error(pipe_fd, "camera_start(): %s", camera_get_error());
        return 5;
    }

    sigset_t set;
    ok = init_siglistener(&set);
    if (!ok) {
        pipe_write_error(pipe_fd, "init_siglistener() failed");
        return 5;
    }

    // tell the parent that initialization succeeded, then let
    // on_encoder_output() start streaming frames
    pipe_write_ready(pipe_fd);
    pthread_mutex_unlock(&pipe_mutex);

    // block until the parent kills the process (see init_siglistener())
    int sig;
    sigwait(&set, &sig);
    return 0;
}

36
internal/rpicamera/exe/parameters.c

@ -0,0 +1,36 @@ @@ -0,0 +1,36 @@
#include <stdlib.h>
#include <string.h>
#include <linux/videodev2.h>
#include "parameters.h"
// parameters_load() fills params with the settings that the parent process
// passes through environment variables. Unrecognized PROFILE/LEVEL values
// fall back to "high" and "4.2" respectively.
void parameters_load(parameters_t *params) {
    params->camera_id = atoi(getenv("CAMERA_ID"));
    params->width = atoi(getenv("WIDTH"));
    params->height = atoi(getenv("HEIGHT"));
    params->fps = atoi(getenv("FPS"));
    params->idr_period = atoi(getenv("IDR_PERIOD"));
    params->bitrate = atoi(getenv("BITRATE"));

    const char *profile = getenv("PROFILE");
    params->profile =
        (strcmp(profile, "baseline") == 0) ? V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE :
        (strcmp(profile, "main") == 0)     ? V4L2_MPEG_VIDEO_H264_PROFILE_MAIN :
                                             V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;

    const char *level = getenv("LEVEL");
    params->level =
        (strcmp(level, "4.0") == 0) ? V4L2_MPEG_VIDEO_H264_LEVEL_4_0 :
        (strcmp(level, "4.1") == 0) ? V4L2_MPEG_VIDEO_H264_LEVEL_4_1 :
                                      V4L2_MPEG_VIDEO_H264_LEVEL_4_2;

    // fixed buffer counts: 3 DMABUF input slots, twice as many capture slots
    params->buffer_count = 3;
    params->capture_buffer_count = params->buffer_count * 2;
}

22
internal/rpicamera/exe/parameters.h

@ -0,0 +1,22 @@ @@ -0,0 +1,22 @@
#ifndef PARAMETERS_H
#define PARAMETERS_H

// parameters_t groups the settings received from the parent process via
// environment variables (see parameters_load()).
typedef struct {
    unsigned int camera_id;            // camera index, from CAMERA_ID
    unsigned int width;                // frame width in pixels
    unsigned int height;               // frame height in pixels
    unsigned int fps;                  // frames per second
    unsigned int idr_period;           // distance between IDR frames
    unsigned int bitrate;              // encoder bitrate (V4L2_CID_MPEG_VIDEO_BITRATE)
    unsigned int profile;              // V4L2_MPEG_VIDEO_H264_PROFILE_* value
    unsigned int level;                // V4L2_MPEG_VIDEO_H264_LEVEL_* value
    unsigned int buffer_count;         // DMABUF slots on the encoder output queue
    unsigned int capture_buffer_count; // mmap()ed slots on the encoder capture queue
} parameters_t;

#ifdef __cplusplus
extern "C" {
#endif

void parameters_load(parameters_t *params);

#ifdef __cplusplus
}
#endif

#endif

13
internal/rpicamera/params.go

@ -0,0 +1,13 @@ @@ -0,0 +1,13 @@
package rpicamera
// Params is a set of camera parameters.
// The values are handed to the helper executable through environment
// variables (CAMERA_ID, WIDTH, ... — see New() and exe/parameters.c).
type Params struct {
	CameraID  int    // index of the camera to use
	Width     int    // frame width in pixels
	Height    int    // frame height in pixels
	FPS       int    // frames per second
	IDRPeriod int    // distance between IDR frames
	Bitrate   int    // encoder bitrate
	Profile   string // H264 profile: "baseline", "main", anything else selects high
	Level     string // H264 level: "4.0", "4.1", anything else selects 4.2
}

69
internal/rpicamera/pipe.go

@ -0,0 +1,69 @@ @@ -0,0 +1,69 @@
//go:build rpicamera
// +build rpicamera
package rpicamera
import (
	"encoding/binary"
	"io"
	"syscall"
)
func syscallReadAll(fd int, buf []byte) error {
size := len(buf)
read := 0
for {
n, err := syscall.Read(fd, buf[read:size])
if err != nil {
return err
}
read += n
if read >= size {
break
}
}
return nil
}
// pipe is a unidirectional byte channel backed by a pair of file
// descriptors created with the pipe() syscall; the write end is inherited
// by the helper process, the read end stays in this process.
type pipe struct {
	readFD  int
	writeFD int
}
// newPipe creates an OS pipe and wraps its two file descriptors.
func newPipe() (*pipe, error) {
	var fds [2]int
	if err := syscall.Pipe(fds[:]); err != nil {
		return nil, err
	}

	p := &pipe{
		readFD:  fds[0],
		writeFD: fds[1],
	}
	return p, nil
}
// close closes both ends of the pipe.
func (p *pipe) close() {
	syscall.Close(p.readFD)
	syscall.Close(p.writeFD)
}
// read reads a single length-prefixed message from the pipe.
// Each message is preceded by its size encoded as a 4-byte little-endian
// integer (the format written by the helper in exe/main.c).
func (p *pipe) read() ([]byte, error) {
	header := make([]byte, 4)
	if err := syscallReadAll(p.readFD, header); err != nil {
		return nil, err
	}

	payload := make([]byte, int(binary.LittleEndian.Uint32(header)))
	if err := syscallReadAll(p.readFD, payload); err != nil {
		return nil, err
	}

	return payload, nil
}

134
internal/rpicamera/rpicamera.go

@ -0,0 +1,134 @@ @@ -0,0 +1,134 @@
//go:build rpicamera
// +build rpicamera
package rpicamera
import (
_ "embed"
"fmt"
"strconv"
"github.com/aler9/gortsplib/pkg/h264"
)
// exeContent is the embedded helper executable, built from the sources in
// exe/ and spawned as a child process by New().
//go:embed exe/exe
var exeContent []byte
// RPICamera is a RPI Camera reader.
type RPICamera struct {
	onData     func([][]byte) // receives the NALUs of every access unit
	exe        *embeddedExe   // helper process spawned from exeContent
	pipe       *pipe          // carries messages from the helper
	waitDone   chan error     // receives the result of exe.cmd.Wait()
	readerDone chan error     // receives the result of the pipe reader goroutine
}
// New allocates a RPICamera.
// It spawns the embedded helper executable, passes the camera parameters
// through environment variables, waits for a handshake message on the pipe
// and then starts a goroutine that forwards encoded frames to onData.
func New(
	params Params,
	onData func([][]byte),
) (*RPICamera, error) {
	pipe, err := newPipe()
	if err != nil {
		return nil, err
	}

	// parameters travel to the child via environment variables,
	// read back by exe/parameters.c
	env := []string{
		"PIPE_FD=" + strconv.FormatInt(int64(pipe.writeFD), 10),
		"CAMERA_ID=" + strconv.FormatInt(int64(params.CameraID), 10),
		"WIDTH=" + strconv.FormatInt(int64(params.Width), 10),
		"HEIGHT=" + strconv.FormatInt(int64(params.Height), 10),
		"FPS=" + strconv.FormatInt(int64(params.FPS), 10),
		"IDR_PERIOD=" + strconv.FormatInt(int64(params.IDRPeriod), 10),
		"BITRATE=" + strconv.FormatInt(int64(params.Bitrate), 10),
		"PROFILE=" + params.Profile,
		"LEVEL=" + params.Level,
	}

	exe, err := newEmbeddedExe(exeContent, env)
	if err != nil {
		pipe.close()
		return nil, err
	}

	// collect the process exit status, so that a premature death can be
	// distinguished from a pipe error during the handshake below
	waitDone := make(chan error)
	go func() {
		waitDone <- exe.cmd.Wait()
	}()

	// handshake: the child sends 'r' (ready) on success or 'e' followed by
	// an error message on failure (see exe/main.c)
	readerDone := make(chan error)
	go func() {
		readerDone <- func() error {
			buf, err := pipe.read()
			if err != nil {
				return err
			}

			switch buf[0] {
			case 'e':
				// NOTE(review): non-constant format string; a '%' inside
				// the child's message would be mangled — errors.New would
				// be safer here.
				return fmt.Errorf(string(buf[1:]))

			case 'r':
				return nil

			default:
				return fmt.Errorf("unexpected output from pipe (%c)", buf[0])
			}
		}()
	}()

	select {
	case <-waitDone:
		// the child exited before completing the handshake
		exe.close()
		pipe.close()
		<-readerDone
		return nil, fmt.Errorf("process exited unexpectedly")

	case err := <-readerDone:
		if err != nil {
			exe.close()
			<-waitDone
			pipe.close()
			return nil, err
		}
	}

	// steady state: every message is 'b' followed by an Annex-B H264 access
	// unit, which is split into NALUs and handed to onData
	readerDone = make(chan error)
	go func() {
		readerDone <- func() error {
			for {
				buf, err := pipe.read()
				if err != nil {
					return err
				}

				if buf[0] != 'b' {
					return fmt.Errorf("unexpected output from pipe (%c)", buf[0])
				}

				buf = buf[1:]

				nalus, err := h264.AnnexBUnmarshal(buf)
				if err != nil {
					return err
				}

				onData(nalus)
			}
		}()
	}()

	return &RPICamera{
		onData:     onData,
		exe:        exe,
		pipe:       pipe,
		waitDone:   waitDone,
		readerDone: readerDone,
	}, nil
}
// Close closes a RPICamera.
// Shutdown order matters: kill the helper, wait for it to exit, then close
// the pipe so the reader goroutine unblocks, and finally drain it.
func (c *RPICamera) Close() {
	c.exe.close()
	<-c.waitDone
	c.pipe.close()
	<-c.readerDone
}

23
internal/rpicamera/rpicamera_disabled.go

@ -0,0 +1,23 @@ @@ -0,0 +1,23 @@
//go:build !rpicamera
// +build !rpicamera
package rpicamera
import (
"fmt"
)
// RPICamera is a RPI Camera reader.
// In builds without the "rpicamera" tag it is a non-functional placeholder.
type RPICamera struct{}
// New allocates a RPICamera.
// This build lacks the "rpicamera" build tag, so New always returns an
// error; the parameters exist only to mirror the enabled implementation.
func New(
	params Params,
	onData func([][]byte),
) (*RPICamera, error) {
	return nil, fmt.Errorf("server was compiled without support for the Raspberry Pi Camera")
}
// Close closes a RPICamera.
// No-op in builds without rpicamera support.
func (c *RPICamera) Close() {
}

11
rtsp-simple-server.yml

@ -173,6 +173,7 @@ paths: @@ -173,6 +173,7 @@ paths:
# * http://existing-url/stream.m3u8 -> the stream is pulled from another HLS server
# * https://existing-url/stream.m3u8 -> the stream is pulled from another HLS server with HTTPS
# * redirect -> the stream is provided by another path or server
# * rpiCamera -> the stream is provided by a Raspberry Pi Camera
source: publisher
# If the source is an RTSP or RTSPS URL, this is the protocol that will be used to
@ -213,6 +214,16 @@ paths: @@ -213,6 +214,16 @@ paths:
# path. It can be a relative path (e.g. /otherstream) or an absolute RTSP URL.
fallback:
# If the source is "rpiCamera", these are the Raspberry Pi Camera parameters
rpiCameraCamID: 0
rpiCameraWidth: 1280
rpiCameraHeight: 720
rpiCameraFPS: 30
rpiCameraIDRPeriod: 60
rpiCameraBitrate: 1000000
rpiCameraProfile: main
rpiCameraLevel: '4.1'
# Username required to publish.
# SHA256-hashed values can be inserted with the "sha256:" prefix.
publishUser:

Loading…
Cancel
Save