
Add additional Raspberry Pi Camera parameters (#1198)

* rpicamera: add rpiCameraHFlip and rpiCameraVFlip parameters

* rpicamera: add rpiCameraBrightness, rpiCameraContrast,
rpiCameraSaturation, rpiCameraSharpness, rpiCameraExposure,
rpiCameraAWB, rpiCameraDenoise, rpiCameraShutter, rpiCameraMetering,
rpiCameraGain, rpiCameraEV, rpiCameraROI, rpiCameraTuningFile

* support float values in config file
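For illustration, a path entry combining several of the new parameters could look like the following (a minimal sketch: the path name `cam` and the specific values are examples only, mirroring the defaults documented in rtsp-simple-server.yml below):

```yaml
paths:
  cam:
    source: rpiCamera
    rpiCameraWidth: 1920
    rpiCameraHeight: 1080
    rpiCameraHFlip: true
    rpiCameraVFlip: true
    # float parameters are now accepted in the configuration file
    rpiCameraContrast: 1.2
    rpiCameraExposure: normal
    rpiCameraAWB: auto
```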
Branch: pull/1219/head
Alessandro Ros authored 3 years ago, committed by GitHub
Commit: 8825fddd89
1. README.md (2 changed lines)
2. internal/conf/env.go (10 changed lines)
3. internal/conf/env_test.go (28 changed lines)
4. internal/conf/path.go (24 changed lines)
5. internal/core/source_static.go (31 changed lines)
6. internal/rpicamera/exe/camera.cpp (148 changed lines)
7. internal/rpicamera/exe/encoder.c (132 changed lines)
8. internal/rpicamera/exe/parameters.c (16 changed lines)
9. internal/rpicamera/exe/parameters.h (17 changed lines)
10. internal/rpicamera/params.go (31 changed lines)
11. internal/rpicamera/rpicamera.go (22 changed lines)
12. rtsp-simple-server.yml (44 changed lines)
13. scripts/binaries.mk (4 changed lines)

README.md (2 changed lines)

@@ -557,7 +557,7 @@ paths:
rpiCameraHeight: 1080
```
-All available parameters are listed in the [sample configuration file](https://github.com/aler9/rtsp-simple-server/blob/1e788f81fd46c7e8f5314bd4ae04989debfff52c/rtsp-simple-server.yml#L230).
+All available parameters are listed in the [sample configuration file](https://github.com/aler9/rtsp-simple-server/blob/master/rtsp-simple-server.yml#L230).

### From OBS Studio

internal/conf/env.go (10 changed lines)

@@ -52,6 +52,16 @@ func loadEnvInternal(env map[string]string, prefix string, rv reflect.Value) error {
}
return nil
case reflect.TypeOf(float64(0)):
if ev, ok := env[prefix]; ok {
iv, err := strconv.ParseFloat(ev, 64)
if err != nil {
return fmt.Errorf("%s: %s", prefix, err)
}
rv.SetFloat(iv)
}
return nil
case reflect.TypeOf(bool(false)):
if ev, ok := env[prefix]; ok {
switch strings.ToLower(ev) {

internal/conf/env_test.go (28 changed lines)

@@ -9,33 +9,21 @@ import (
)

type subStruct struct {
-// int
MyParam int
}

type mapEntry struct {
-// string
MyValue string
-// struct
MyStruct subStruct
}

type testStruct struct {
-// string
MyString string
-// int
MyInt int
+MyFloat float64
-// bool
MyBool bool
-// duration
MyDuration StringDuration
-// map
MyMap map[string]*mapEntry
}

func TestEnvironment(t *testing.T) {
@@ -45,6 +33,9 @@ func TestEnvironment(t *testing.T) {
os.Setenv("MYPREFIX_MYINT", "123")
defer os.Unsetenv("MYPREFIX_MYINT")

os.Setenv("MYPREFIX_MYFLOAT", "15.2")
defer os.Unsetenv("MYPREFIX_MYFLOAT")

os.Setenv("MYPREFIX_MYBOOL", "yes")
defer os.Unsetenv("MYPREFIX_MYBOOL")
@@ -66,6 +57,7 @@ func TestEnvironment(t *testing.T) {
require.Equal(t, "testcontent", s.MyString)
require.Equal(t, 123, s.MyInt)
require.Equal(t, 15.2, s.MyFloat)
require.Equal(t, true, s.MyBool)
require.Equal(t, 22*StringDuration(time.Second), s.MyDuration)

internal/conf/path.go (24 changed lines)

@@ -52,6 +52,21 @@ type PathConf struct {
RPICameraCamID int `json:"rpiCameraCamID"`
RPICameraWidth int `json:"rpiCameraWidth"`
RPICameraHeight int `json:"rpiCameraHeight"`
RPICameraHFlip bool `json:"rpiCameraHFlip"`
RPICameraVFlip bool `json:"rpiCameraVFlip"`
RPICameraBrightness float64 `json:"rpiCameraBrightness"`
RPICameraContrast float64 `json:"rpiCameraContrast"`
RPICameraSaturation float64 `json:"rpiCameraSaturation"`
RPICameraSharpness float64 `json:"rpiCameraSharpness"`
RPICameraExposure string `json:"rpiCameraExposure"`
RPICameraAWB string `json:"rpiCameraAWB"`
RPICameraDenoise string `json:"rpiCameraDenoise"`
RPICameraShutter int `json:"rpiCameraShutter"`
RPICameraMetering string `json:"rpiCameraMetering"`
RPICameraGain float64 `json:"rpiCameraGain"`
RPICameraEV float64 `json:"rpiCameraEV"`
RPICameraROI string `json:"rpiCameraROI"`
RPICameraTuningFile string `json:"rpiCameraTuningFile"`
RPICameraFPS int `json:"rpiCameraFPS"`
RPICameraIDRPeriod int `json:"rpiCameraIDRPeriod"`
RPICameraBitrate int `json:"rpiCameraBitrate"`
@@ -183,6 +198,15 @@ func (pconf *PathConf) checkAndFillMissing(conf *Conf, name string) error {
if pconf.RPICameraHeight == 0 {
pconf.RPICameraHeight = 720
}
if pconf.RPICameraContrast == 0 {
pconf.RPICameraContrast = 1
}
if pconf.RPICameraSaturation == 0 {
pconf.RPICameraSaturation = 1
}
if pconf.RPICameraSharpness == 0 {
pconf.RPICameraSharpness = 1
}
if pconf.RPICameraFPS == 0 {
pconf.RPICameraFPS = 30
}

internal/core/source_static.go (31 changed lines)

@@ -86,14 +86,29 @@ func newSourceStatic(
case conf.Source == "rpiCamera":
s.impl = newRPICameraSource(
rpicamera.Params{
CameraID: conf.RPICameraCamID,
Width: conf.RPICameraWidth,
Height: conf.RPICameraHeight,
HFlip: conf.RPICameraHFlip,
VFlip: conf.RPICameraVFlip,
Brightness: conf.RPICameraBrightness,
Contrast: conf.RPICameraContrast,
Saturation: conf.RPICameraSaturation,
Sharpness: conf.RPICameraSharpness,
Exposure: conf.RPICameraExposure,
AWB: conf.RPICameraAWB,
Denoise: conf.RPICameraDenoise,
Shutter: conf.RPICameraShutter,
Metering: conf.RPICameraMetering,
Gain: conf.RPICameraGain,
EV: conf.RPICameraEV,
ROI: conf.RPICameraROI,
TuningFile: conf.RPICameraTuningFile,
FPS: conf.RPICameraFPS,
IDRPeriod: conf.RPICameraIDRPeriod,
Bitrate: conf.RPICameraBitrate,
Profile: conf.RPICameraProfile,
Level: conf.RPICameraLevel,
},
s)
}

internal/rpicamera/exe/camera.cpp (148 changed lines)

@@ -10,6 +10,7 @@
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/property_ids.h>
#include <linux/videodev2.h>
#include "parameters.h"
@@ -18,15 +19,17 @@
using libcamera::CameraManager;
using libcamera::CameraConfiguration;
using libcamera::Camera;
using libcamera::ControlList;
using libcamera::FrameBufferAllocator;
using libcamera::FrameBuffer;
using libcamera::Rectangle;
using libcamera::Request;
using libcamera::Span;
using libcamera::Stream;
using libcamera::StreamRoles;
using libcamera::StreamRole;
using libcamera::StreamConfiguration;
using libcamera::Transform;

namespace controls = libcamera::controls;
namespace formats = libcamera::formats;
@@ -53,10 +56,10 @@ struct CameraPriv {
};

static int get_v4l2_colorspace(std::optional<libcamera::ColorSpace> const &cs) {
if (cs == libcamera::ColorSpace::Rec709) {
return V4L2_COLORSPACE_REC709;
}
return V4L2_COLORSPACE_SMPTE170M;
}

bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
@@ -70,9 +73,9 @@ bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
}

std::vector<std::shared_ptr<libcamera::Camera>> cameras = camp->camera_manager->cameras();
auto rem = std::remove_if(cameras.begin(), cameras.end(),
[](auto &cam) { return cam->id().find("/usb") != std::string::npos; });
cameras.erase(rem, cameras.end());

if (params->camera_id >= cameras.size()){
set_error("selected camera is not available");
return false;
@@ -98,18 +101,28 @@ bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
}

StreamConfiguration &stream_conf = conf->at(0);
stream_conf.pixelFormat = formats::YUV420;
stream_conf.bufferCount = params->buffer_count;
stream_conf.size.width = params->width;
stream_conf.size.height = params->height;
if (params->width >= 1280 || params->height >= 720) {
stream_conf.colorSpace = libcamera::ColorSpace::Rec709;
} else {
stream_conf.colorSpace = libcamera::ColorSpace::Smpte170m;
}

conf->transform = Transform::Identity;
if (params->h_flip) {
conf->transform = Transform::HFlip * conf->transform;
}
if (params->v_flip) {
conf->transform = Transform::VFlip * conf->transform;
}

setenv("LIBCAMERA_RPI_TUNING_FILE", params->tuning_file, 1);

CameraConfiguration::Status vstatus = conf->validate();
if (vstatus == CameraConfiguration::Invalid) {
set_error("StreamConfiguration.validate() failed");
return false;
}
@@ -122,7 +135,7 @@ bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
Stream *stream = stream_conf.stream();
camp->allocator = std::make_unique<FrameBufferAllocator>(camp->camera);
res = camp->allocator->allocate(stream);
if (res < 0) {
set_error("allocate() failed");
@@ -137,7 +150,7 @@ bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
}

int res = request->addBuffer(stream, buffer.get());
if (res != 0) {
set_error("addBuffer() failed");
return false;
}
@@ -154,7 +167,7 @@ bool camera_create(parameters_t *params, camera_frame_cb frame_cb, camera_t **cam) {
static void on_request_complete(Request *request) {
if (request->status() == Request::RequestCancelled) {
return;
}

CameraPriv *camp = (CameraPriv *)request->cookie();
@@ -186,6 +199,97 @@ bool camera_start(camera_t *cam) {
CameraPriv *camp = (CameraPriv *)cam;

ControlList ctrls = ControlList(controls::controls);
ctrls.set(controls::Brightness, camp->params->brightness);
ctrls.set(controls::Contrast, camp->params->contrast);
ctrls.set(controls::Saturation, camp->params->saturation);
ctrls.set(controls::Sharpness, camp->params->sharpness);
int exposure_mode;
if (strcmp(camp->params->exposure, "short") == 0) {
exposure_mode = controls::ExposureShort;
} else if (strcmp(camp->params->exposure, "long") == 0) {
exposure_mode = controls::ExposureLong;
} else if (strcmp(camp->params->exposure, "custom") == 0) {
exposure_mode = controls::ExposureCustom;
} else {
exposure_mode = controls::ExposureNormal;
}
ctrls.set(controls::AeExposureMode, exposure_mode);
int awb_mode;
if (strcmp(camp->params->awb, "incandescent") == 0) {
awb_mode = controls::AwbIncandescent;
} else if (strcmp(camp->params->awb, "tungsten") == 0) {
awb_mode = controls::AwbTungsten;
} else if (strcmp(camp->params->awb, "fluorescent") == 0) {
awb_mode = controls::AwbFluorescent;
} else if (strcmp(camp->params->awb, "indoor") == 0) {
awb_mode = controls::AwbIndoor;
} else if (strcmp(camp->params->awb, "daylight") == 0) {
awb_mode = controls::AwbDaylight;
} else if (strcmp(camp->params->awb, "cloudy") == 0) {
awb_mode = controls::AwbCloudy;
} else if (strcmp(camp->params->awb, "custom") == 0) {
awb_mode = controls::AwbCustom;
} else {
awb_mode = controls::AwbAuto;
}
ctrls.set(controls::AwbMode, awb_mode);
int denoise_mode;
if (strcmp(camp->params->denoise, "off") == 0) {
denoise_mode = controls::draft::NoiseReductionModeOff;
} else if (strcmp(camp->params->denoise, "cdn_off") == 0) {
denoise_mode = controls::draft::NoiseReductionModeMinimal;
} else if (strcmp(camp->params->denoise, "cdn_hq") == 0) {
denoise_mode = controls::draft::NoiseReductionModeHighQuality;
} else {
denoise_mode = controls::draft::NoiseReductionModeFast;
}
ctrls.set(controls::draft::NoiseReductionMode, denoise_mode);
if (camp->params->shutter != 0) {
ctrls.set(controls::ExposureTime, camp->params->shutter);
}
int metering_mode;
if (strcmp(camp->params->metering, "spot") == 0) {
metering_mode = controls::MeteringSpot;
} else if (strcmp(camp->params->metering, "matrix") == 0) {
metering_mode = controls::MeteringMatrix;
} else if (strcmp(camp->params->metering, "custom") == 0) {
metering_mode = controls::MeteringCustom;
} else {
metering_mode = controls::MeteringCentreWeighted;
}
ctrls.set(controls::AeMeteringMode, metering_mode);
if (camp->params->gain > 0) {
ctrls.set(controls::AnalogueGain, camp->params->gain);
}
ctrls.set(controls::ExposureValue, camp->params->ev);
if (strlen(camp->params->roi) != 0) {
float vals[4];
int i = 0;
char *token = strtok((char *)camp->params->roi, ",");
while (token != NULL) {
vals[i++] = atof(token);
token = strtok(NULL, ",");
}
Rectangle sensor_area = camp->camera->properties().get(libcamera::properties::ScalerCropMaximum);
Rectangle crop(
vals[0] * sensor_area.width,
vals[1] * sensor_area.height,
vals[2] * sensor_area.width,
vals[3] * sensor_area.height);
crop.translateBy(sensor_area.topLeft());
ctrls.set(controls::ScalerCrop, crop);
}
int64_t frame_time = 1000000 / camp->params->fps;
ctrls.set(controls::FrameDurationLimits, Span<const int64_t, 2>({ frame_time, frame_time }));
@@ -197,13 +301,13 @@ bool camera_start(camera_t *cam) {
camp->camera->requestCompleted.connect(on_request_complete);

for (std::unique_ptr<Request> &request : camp->requests) {
int res = camp->camera->queueRequest(request.get());
if (res != 0) {
set_error("Camera.queueRequest() failed");
return false;
}
}

return true;
}

internal/rpicamera/exe/encoder.c (132 changed lines)

@@ -102,15 +102,15 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
*enc = malloc(sizeof(encoder_priv_t));
encoder_priv_t *encp = (encoder_priv_t *)(*enc);
encp->fd = open("/dev/video11", O_RDWR, 0);
if (encp->fd < 0) {
set_error("unable to open device");
return false;
}
struct v4l2_control ctrl = {0};
ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
ctrl.value = params->bitrate;
int res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set bitrate");
@@ -119,7 +119,7 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
}
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
ctrl.value = params->profile;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set profile");
@@ -128,7 +128,7 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
}
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
ctrl.value = params->level;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set level");
@@ -137,7 +137,7 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
}
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD;
ctrl.value = params->idr_period;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set IDR period");
@@ -146,7 +146,7 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
}
ctrl.id = V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER;
ctrl.value = 0;
res = ioctl(encp->fd, VIDIOC_S_CTRL, &ctrl);
if (res != 0) {
set_error("unable to set REPEAT_SEQ_HEADER");
@@ -154,15 +154,15 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
return false;
}
struct v4l2_format fmt = {0};
fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
fmt.fmt.pix_mp.width = params->width;
fmt.fmt.pix_mp.height = params->height;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = stride;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = colorspace;
fmt.fmt.pix_mp.num_planes = 1;
res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
if (res != 0) {
set_error("unable to set output format");
@@ -170,16 +170,16 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
return false;
}
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = params->width;
fmt.fmt.pix_mp.height = params->height;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_DEFAULT;
fmt.fmt.pix_mp.num_planes = 1;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 0;
fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 512 << 10;
res = ioctl(encp->fd, VIDIOC_S_FMT, &fmt);
if (res != 0) {
set_error("unable to set capture format");
@@ -187,10 +187,10 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
return false;
}
struct v4l2_streamparm parm = {0};
parm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
parm.parm.output.timeperframe.numerator = 1;
parm.parm.output.timeperframe.denominator = params->fps;
res = ioctl(encp->fd, VIDIOC_S_PARM, &parm);
if (res != 0) {
set_error("unable to set fps");
@@ -198,10 +198,10 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
return false;
}
struct v4l2_requestbuffers reqbufs = {0};
reqbufs.count = params->buffer_count;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
reqbufs.memory = V4L2_MEMORY_DMABUF;
res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
if (res != 0) {
set_error("unable to set output buffers");
@@ -209,10 +209,10 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
return false;
}
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = params->capture_buffer_count;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
reqbufs.memory = V4L2_MEMORY_MMAP;
res = ioctl(encp->fd, VIDIOC_REQBUFS, &reqbufs);
if (res != 0) {
set_error("unable to set capture buffers");
@@ -222,30 +222,30 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
encp->capture_buffers = malloc(sizeof(void *) * reqbufs.count);
for (unsigned int i = 0; i < reqbufs.count; i++) {
struct v4l2_plane planes[VIDEO_MAX_PLANES];
struct v4l2_buffer buffer = {0};
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
buffer.length = 1;
buffer.m.planes = planes;
int res = ioctl(encp->fd, VIDIOC_QUERYBUF, &buffer);
if (res != 0) {
set_error("unable to query buffer");
free(encp->capture_buffers);
close(encp->fd);
return false;
}
encp->capture_buffers[i] = mmap(
0,
buffer.m.planes[0].length,
PROT_READ | PROT_WRITE, MAP_SHARED,
encp->fd,
buffer.m.planes[0].m.mem_offset);
if (encp->capture_buffers[i] == MAP_FAILED) {
set_error("mmap() failed");
free(encp->capture_buffers);
close(encp->fd);
@@ -253,15 +253,15 @@ bool encoder_create(parameters_t *params, int stride, int colorspace, encoder_ou
}
res = ioctl(encp->fd, VIDIOC_QBUF, &buffer);
if (res != 0) {
set_error("ioctl() failed");
free(encp->capture_buffers);
close(encp->fd);
return false;
}
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
res = ioctl(encp->fd, VIDIOC_STREAMON, &type);
if (res != 0) {
set_error("unable to activate output stream");
@@ -294,20 +294,20 @@ void encoder_encode(encoder_t *enc, int buffer_fd, size_t size, int64_t timestamp_us) {
int index = encp->cur_buffer++;
encp->cur_buffer %= encp->params->buffer_count;
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
buf.index = index;
buf.field = V4L2_FIELD_NONE;
buf.memory = V4L2_MEMORY_DMABUF;
buf.length = 1;
buf.timestamp.tv_sec = timestamp_us / 1000000;
buf.timestamp.tv_usec = timestamp_us % 1000000;
buf.m.planes = planes;
buf.m.planes[0].m.fd = buffer_fd;
buf.m.planes[0].bytesused = size;
buf.m.planes[0].length = size;
int res = ioctl(encp->fd, VIDIOC_QBUF, &buf);
if (res != 0) {
fprintf(stderr, "encoder_encode(): ioctl() failed\n");
exit(1);

internal/rpicamera/exe/parameters.c (16 changed lines)

@@ -1,5 +1,6 @@
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <linux/videodev2.h>
@@ -9,6 +10,21 @@ void parameters_load(parameters_t *params) {
params->camera_id = atoi(getenv("CAMERA_ID"));
params->width = atoi(getenv("WIDTH"));
params->height = atoi(getenv("HEIGHT"));
params->h_flip = (strcmp(getenv("H_FLIP"), "1") == 0);
params->v_flip = (strcmp(getenv("V_FLIP"), "1") == 0);
params->brightness = atof(getenv("BRIGHTNESS"));
params->contrast = atof(getenv("CONTRAST"));
params->saturation = atof(getenv("SATURATION"));
params->sharpness = atof(getenv("SHARPNESS"));
params->exposure = getenv("EXPOSURE");
params->awb = getenv("AWB");
params->denoise = getenv("DENOISE");
params->shutter = atoi(getenv("SHUTTER"));
params->metering = getenv("METERING");
params->gain = atof(getenv("GAIN"));
params->ev = atof(getenv("EV"));
params->roi = getenv("ROI");
params->tuning_file = getenv("TUNING_FILE");
params->fps = atoi(getenv("FPS"));
params->idr_period = atoi(getenv("IDR_PERIOD"));
params->bitrate = atoi(getenv("BITRATE"));

internal/rpicamera/exe/parameters.h (17 changed lines)

@@ -2,11 +2,28 @@ typedef struct {
unsigned int camera_id;
unsigned int width;
unsigned int height;
bool h_flip;
bool v_flip;
float brightness;
float contrast;
float saturation;
float sharpness;
const char *exposure;
const char *awb;
const char *denoise;
unsigned int shutter;
const char *metering;
float gain;
float ev;
const char *roi;
const char *tuning_file;
unsigned int fps;
unsigned int idr_period;
unsigned int bitrate;
unsigned int profile;
unsigned int level;

// private
unsigned int buffer_count;
unsigned int capture_buffer_count;
} parameters_t;

internal/rpicamera/params.go (31 changed lines)

@@ -2,12 +2,27 @@ package rpicamera
// Params is a set of camera parameters.
type Params struct {
CameraID int
Width int
Height int
HFlip bool
VFlip bool
Brightness float64
Contrast float64
Saturation float64
Sharpness float64
Exposure string
AWB string
Denoise string
Shutter int
Metering string
Gain float64
EV float64
ROI string
TuningFile string
FPS int
IDRPeriod int
Bitrate int
Profile string
Level string
}

internal/rpicamera/rpicamera.go (22 changed lines)

@@ -14,6 +14,13 @@ import (
//go:embed exe/exe
var exeContent []byte
func bool2env(v bool) string {
if v {
return "1"
}
return "0"
}
type RPICamera struct {
onData func([][]byte)
@@ -38,6 +45,21 @@ func New(
"CAMERA_ID=" + strconv.FormatInt(int64(params.CameraID), 10),
"WIDTH=" + strconv.FormatInt(int64(params.Width), 10),
"HEIGHT=" + strconv.FormatInt(int64(params.Height), 10),
"H_FLIP=" + bool2env(params.HFlip),
"V_FLIP=" + bool2env(params.VFlip),
"BRIGHTNESS=" + strconv.FormatFloat(params.Brightness, 'f', -1, 64),
"CONTRAST=" + strconv.FormatFloat(params.Contrast, 'f', -1, 64),
"SATURATION=" + strconv.FormatFloat(params.Saturation, 'f', -1, 64),
"SHARPNESS=" + strconv.FormatFloat(params.Sharpness, 'f', -1, 64),
"EXPOSURE=" + params.Exposure,
"AWB=" + params.AWB,
"DENOISE=" + params.Denoise,
"SHUTTER=" + strconv.FormatInt(int64(params.Shutter), 10),
"METERING=" + params.Metering,
"GAIN=" + strconv.FormatFloat(params.Gain, 'f', -1, 64),
"EV=" + strconv.FormatFloat(params.EV, 'f', -1, 64),
"ROI=" + params.ROI,
"TUNING_FILE=" + params.TuningFile,
"FPS=" + strconv.FormatInt(int64(params.FPS), 10), "FPS=" + strconv.FormatInt(int64(params.FPS), 10),
"IDR_PERIOD=" + strconv.FormatInt(int64(params.IDRPeriod), 10), "IDR_PERIOD=" + strconv.FormatInt(int64(params.IDRPeriod), 10),
"BITRATE=" + strconv.FormatInt(int64(params.Bitrate), 10), "BITRATE=" + strconv.FormatInt(int64(params.Bitrate), 10),

rtsp-simple-server.yml (44 changed lines)

@@ -227,14 +227,56 @@ paths:
# path. It can be a relative path (i.e. /otherstream) or an absolute RTSP URL.
fallback:

# If the source is "rpiCamera", these are the Raspberry Pi Camera parameters.
# ID of the camera
rpiCameraCamID: 0
# width of frames
rpiCameraWidth: 1280
# height of frames
rpiCameraHeight: 720
# flip horizontally
rpiCameraHFlip: false
# flip vertically
rpiCameraVFlip: false
# brightness [-1, 1]
rpiCameraBrightness: 0
# contrast [0, 16]
rpiCameraContrast: 1
# saturation [0, 16]
rpiCameraSaturation: 1
# sharpness [0, 16]
rpiCameraSharpness: 1
# exposure mode.
# values: normal, short, long, custom
rpiCameraExposure: normal
# auto-white-balance mode.
# values: auto, incandescent, tungsten, fluorescent, indoor, daylight, cloudy, custom
rpiCameraAWB: auto
# denoise operating mode.
# values: auto, off, cdn_off, cdn_fast, cdn_hq
rpiCameraDenoise: auto
# fixed shutter speed, in microseconds.
rpiCameraShutter: 0
# metering mode of the AEC/AGC algorithm.
# values: centre, spot, matrix, custom
rpiCameraMetering: centre
# fixed gain
rpiCameraGain: 0
# EV compensation of the image [-10, 10]
rpiCameraEV: 0
# region of interest, in format x,y,width,height
rpiCameraROI:
# tuning file
rpiCameraTuningFile:
# frames per second
rpiCameraFPS: 30
# period between IDR frames
rpiCameraIDRPeriod: 60
# bitrate
rpiCameraBitrate: 1000000
# H264 profile
rpiCameraProfile: main
# H264 level
rpiCameraLevel: '4.1'

# Username required to publish.

scripts/binaries.mk (4 changed lines)

@@ -4,14 +4,14 @@ RUN ["cross-build-start"]
RUN apt update && apt install -y g++ pkg-config make libcamera-dev
WORKDIR /s/internal/rpicamera
COPY internal/rpicamera .
-RUN cd exe && make
+RUN cd exe && make -j$$(nproc)

FROM $(RPI64_IMAGE) AS rpicamera64
RUN ["cross-build-start"]
RUN apt update && apt install -y g++ pkg-config make libcamera-dev
WORKDIR /s/internal/rpicamera
COPY internal/rpicamera .
-RUN cd exe && make
+RUN cd exe && make -j$$(nproc)

FROM $(BASE_IMAGE)
RUN apk add --no-cache zip make git tar
