summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--include/uvcgadget/meson.build1
-rw-r--r--include/uvcgadget/mjpeg_encoder.hpp89
-rw-r--r--lib/libcamera-source.cpp109
-rw-r--r--lib/meson.build8
-rw-r--r--lib/mjpeg_encoder.cpp217
-rw-r--r--meson.build6
6 files changed, 426 insertions, 4 deletions
diff --git a/include/uvcgadget/meson.build b/include/uvcgadget/meson.build
index a9e4384..af795c4 100644
--- a/include/uvcgadget/meson.build
+++ b/include/uvcgadget/meson.build
@@ -9,6 +9,7 @@ uvcgadget_public_headers = files([
'timer.h',
'v4l2-source.h',
'video-source.h',
+ 'mjpeg_encoder.hpp',
])
diff --git a/include/uvcgadget/mjpeg_encoder.hpp b/include/uvcgadget/mjpeg_encoder.hpp
new file mode 100644
index 0000000..a3e92fc
--- /dev/null
+++ b/include/uvcgadget/mjpeg_encoder.hpp
@@ -0,0 +1,89 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
+ *
+ * mjpeg_encoder.hpp - mjpeg video encoder.
+ */
+
+#pragma once
+
+#include <condition_variable>
+#include <mutex>
+#include <queue>
+#include <thread>
+#include <functional>
+
+struct jpeg_compress_struct;
+typedef std::function<void(void *, size_t, int64_t, unsigned int)> OutputReadyCallback;
+
+struct StreamInfo
+{
+ StreamInfo() : width(0), height(0), stride(0) {}
+ unsigned int width;
+ unsigned int height;
+ unsigned int stride;
+ libcamera::PixelFormat pixel_format;
+ std::optional<libcamera::ColorSpace> colour_space;
+};
+
+class MjpegEncoder
+{
+public:
+ MjpegEncoder();
+ ~MjpegEncoder();
+
+ void EncodeBuffer(void *mem, void *dest, unsigned int size,
+ StreamInfo const &info, int64_t timestamp_us,
+ unsigned int cookie);
+ StreamInfo getStreamInfo(libcamera::Stream *stream);
+ void SetOutputReadyCallback(OutputReadyCallback callback) { output_ready_callback_ = callback; }
+
+private:
+ static const int NUM_ENC_THREADS = 4;
+
+ void encodeThread(int num);
+
+ /*
+ * Handle the output buffers in another thread so as not to block the
+ * encoders. The application can take its time, after which we return
+ * this buffer to the encoder for re-use.
+ */
+ void outputThread();
+
+ bool abortEncode_;
+ bool abortOutput_;
+ uint64_t index_;
+
+ struct EncodeItem
+ {
+ void *mem;
+ void *dest;
+ unsigned int size;
+ StreamInfo info;
+ int64_t timestamp_us;
+ uint64_t index;
+ unsigned int cookie;
+ };
+
+ std::queue<EncodeItem> encode_queue_;
+ std::mutex encode_mutex_;
+ std::condition_variable encode_cond_var_;
+ std::thread encode_thread_[NUM_ENC_THREADS];
+ void encodeJPEG(struct jpeg_compress_struct &cinfo, EncodeItem &item,
+ uint8_t *&encoded_buffer, size_t &buffer_len);
+
+ struct OutputItem
+ {
+ void *mem;
+ size_t bytes_used;
+ int64_t timestamp_us;
+ uint64_t index;
+ unsigned int cookie;
+ };
+
+ std::queue<OutputItem> output_queue_[NUM_ENC_THREADS];
+ std::mutex output_mutex_;
+ std::condition_variable output_cond_var_;
+ std::thread output_thread_;
+ OutputReadyCallback output_ready_callback_ ;
+};
diff --git a/lib/libcamera-source.cpp b/lib/libcamera-source.cpp
index 4e22688..30b67f0 100644
--- a/lib/libcamera-source.cpp
+++ b/lib/libcamera-source.cpp
@@ -16,10 +16,15 @@
#include <string>
#include <string.h>
#include <unistd.h>
+#include <map>
+#include <sys/mman.h>
#include <libcamera/libcamera.h>
#include <linux/videodev2.h>
+#include "config.h"
+#include "mjpeg_encoder.hpp"
+
extern "C" {
#include "events.h"
#include "libcamera-source.h"
@@ -28,6 +33,7 @@ extern "C" {
}
using namespace libcamera;
+using namespace std::placeholders;
#define to_libcamera_source(s) container_of(s, struct libcamera_source, src)
@@ -44,11 +50,35 @@ struct libcamera_source {
std::queue<Request *> completed_requests;
int pfds[2];
+ MjpegEncoder *encoder;
+ std::unordered_map<FrameBuffer *, Span<uint8_t>> mapped_buffers_;
+
struct video_buffer_set buffers;
+ void mapBuffer(const std::unique_ptr<FrameBuffer> &buffer);
void requestComplete(Request *request);
+ void outputReady(void *mem, size_t bytesused, int64_t timestamp, unsigned int cookie);
};
+void libcamera_source::mapBuffer(const std::unique_ptr<FrameBuffer> &buffer)
+{
+ size_t buffer_size = 0;
+
+ for (unsigned int i = 0; i < buffer->planes().size(); i++) {
+ const FrameBuffer::Plane &plane = buffer->planes()[i];
+ buffer_size += plane.length;
+
+ if (i == buffer->planes().size() - 1 ||
+ plane.fd.get() != buffer->planes()[i + 1].fd.get()) {
+ void *memory = mmap(NULL, buffer_size, PROT_READ | PROT_WRITE,
+ MAP_SHARED, plane.fd.get(), 0);
+ mapped_buffers_[buffer.get()] =
+ Span<uint8_t>(static_cast<uint8_t *>(memory), buffer_size);
+ buffer_size = 0;
+ }
+ }
+}
+
void libcamera_source::requestComplete(Request *request)
{
if (request->status() == Request::RequestCancelled)
@@ -65,9 +95,22 @@ void libcamera_source::requestComplete(Request *request)
write(pfds[1], "x", 1);
};
+void libcamera_source::outputReady(void *mem, size_t bytesused, int64_t timestamp, unsigned int cookie)
+{
+ struct video_buffer buffer;
+
+ buffer.index = cookie;
+ buffer.mem = mem;
+ buffer.bytesused = bytesused;
+ buffer.timestamp = { timestamp / 1000000, timestamp % 1000000 };
+
+ src.handler(src.handler_data, &src, &buffer);
+}
+
static void libcamera_source_video_process(void *d)
{
struct libcamera_source *src = (struct libcamera_source *)d;
+ Stream *stream = src->config->at(0).stream();
struct video_buffer buffer;
Request *request;
@@ -80,6 +123,25 @@ static void libcamera_source_video_process(void *d)
/* We have only a single buffer per request, so just pick the first */
FrameBuffer *framebuf = request->buffers().begin()->second;
+ /*
+ * If we have an encoder, then rather than simply detailing the buffer
+ * here and passing it back to the sink we need to queue it to the
+ * encoder. The encoder will queue that buffer to the sink after
+ * compression.
+ */
+ if (src->src.type == VIDEO_SOURCE_ENCODED) {
+ int64_t timestamp_ns = framebuf->metadata().timestamp;
+ StreamInfo info = src->encoder->getStreamInfo(stream);
+ auto span = src->mapped_buffers_.find(framebuf);
+ void *mem = span->second.data();
+ void *dest = src->buffers.buffers[request->cookie()].mem;
+ unsigned int size = span->second.size();
+
+ src->encoder->EncodeBuffer(mem, dest, size, info, timestamp_ns / 1000, request->cookie());
+
+ return;
+ }
+
buffer.index = request->cookie();
/* TODO: Correct this for formats libcamera treats as multiplanar */
@@ -113,13 +175,33 @@ static int libcamera_source_set_format(struct video_source *s,
{
struct libcamera_source *src = to_libcamera_source(s);
StreamConfiguration &streamConfig = src->config->at(0);
+ __u32 chosen_pixelformat = fmt->pixelformat;
streamConfig.size.width = fmt->width;
streamConfig.size.height = fmt->height;
- streamConfig.pixelFormat = PixelFormat(fmt->pixelformat);
+ streamConfig.pixelFormat = PixelFormat(chosen_pixelformat);
src->config->validate();
+#ifdef CONFIG_CAN_ENCODE
+ /*
+ * If the user requests MJPEG but the camera can't supply it, try again
+ * with YUV420 and initialise an MjpegEncoder to compress the data.
+ */
+ if (chosen_pixelformat == V4L2_PIX_FMT_MJPEG &&
+ streamConfig.pixelFormat.fourcc() != chosen_pixelformat) {
+ std::cout << "MJPEG format not natively supported; encoding YUV420" << std::endl;
+
+ src->encoder = new MjpegEncoder();
+ src->encoder->SetOutputReadyCallback(std::bind(&libcamera_source::outputReady, src, _1, _2, _3, _4));
+
+ streamConfig.pixelFormat = PixelFormat(V4L2_PIX_FMT_YUV420);
+ src->src.type = VIDEO_SOURCE_ENCODED;
+
+ src->config->validate();
+ }
+#endif
+
if (fmt->pixelformat != streamConfig.pixelFormat.fourcc())
std::cerr << "Warning: set_format: Requested format unavailable" << std::endl;
@@ -132,7 +214,7 @@ static int libcamera_source_set_format(struct video_source *s,
fmt->width = streamConfig.size.width;
fmt->height = streamConfig.size.height;
- fmt->pixelformat = streamConfig.pixelFormat.fourcc();
+ fmt->pixelformat = src->encoder ? V4L2_PIX_FMT_MJPEG : streamConfig.pixelFormat.fourcc();
fmt->field = V4L2_FIELD_ANY;
/* TODO: Can we use libcamera helpers to get image size / stride? */
@@ -181,6 +263,11 @@ static int libcamera_source_alloc_buffers(struct video_source *s, unsigned int n
const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator->buffers(stream);
src->buffers.nbufs = buffers.size();
+ if (src->src.type == VIDEO_SOURCE_ENCODED) {
+ for (const std::unique_ptr<FrameBuffer> &buffer : buffers)
+ src->mapBuffer(buffer);
+ }
+
src->buffers.buffers = (video_buffer *)calloc(src->buffers.nbufs, sizeof(*src->buffers.buffers));
if (!src->buffers.buffers) {
std::cerr << "failed to allocate buffers" << std::endl;
@@ -243,6 +330,11 @@ static int libcamera_source_free_buffers(struct video_source *s)
struct libcamera_source *src = to_libcamera_source(s);
Stream *stream = src->config->at(0).stream();
+ for (auto &[buf, span] : src->mapped_buffers_)
+ munmap(span.data(), span.size());
+
+ src->mapped_buffers_.clear();
+
src->allocator->free(stream);
delete src->allocator;
free(src->buffers.buffers);
@@ -313,6 +405,18 @@ static int libcamera_source_stream_off(struct video_source *s)
while (!src->completed_requests.empty())
src->completed_requests.pop();
+ if (src->src.type == VIDEO_SOURCE_ENCODED) {
+ delete src->encoder;
+ src->encoder = nullptr;
+ }
+
+ /*
+ * We need to reinitialise this here, as if the user selected an
+ * unsupported MJPEG format the encoding routine will have overridden
+ * this setting.
+ */
+ src->src.type = VIDEO_SOURCE_DMABUF;
+
return 0;
}
@@ -403,6 +507,7 @@ struct video_source *libcamera_source_create(const char *devname)
src->src.ops = &libcamera_source_ops;
src->src.type = VIDEO_SOURCE_DMABUF;
+
src->cm = std::make_unique<CameraManager>();
src->cm->start();
diff --git a/lib/meson.build b/lib/meson.build
index 0931eef..f2626fa 100644
--- a/lib/meson.build
+++ b/lib/meson.build
@@ -19,12 +19,16 @@ if libcamera.found()
libuvcgadget_sources += files(['libcamera-source.cpp'])
endif
+if libjpeg.found() and threads.found()
+ libuvcgadget_sources += files(['mjpeg_encoder.cpp'])
+endif
+
libuvcgadget = shared_library('uvcgadget',
libuvcgadget_sources,
- dependencies : libcamera,
+ dependencies : [libcamera, libjpeg, threads],
version : uvc_gadget_version,
install : true,
- include_directories : includes)
+ include_directories : [includes, config_includes])
libuvcgadget_dep = declare_dependency(sources : [
uvcgadget_public_headers,
diff --git a/lib/mjpeg_encoder.cpp b/lib/mjpeg_encoder.cpp
new file mode 100644
index 0000000..41a13d5
--- /dev/null
+++ b/lib/mjpeg_encoder.cpp
@@ -0,0 +1,217 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
+ *
+ * mjpeg_encoder.cpp - mjpeg video encoder.
+ */
+
+#include <chrono>
+#include <iostream>
+#include <pthread.h>
+
+#include <jpeglib.h>
+
+#include <libcamera/libcamera.h>
+
+#include "mjpeg_encoder.hpp"
+
+#if JPEG_LIB_VERSION_MAJOR > 9 || (JPEG_LIB_VERSION_MAJOR == 9 && JPEG_LIB_VERSION_MINOR >= 4)
+typedef size_t jpeg_mem_len_t;
+#else
+typedef unsigned long jpeg_mem_len_t;
+#endif
+
+MjpegEncoder::MjpegEncoder()
+ : abortEncode_(false), abortOutput_(false), index_(0)
+{
+ output_thread_ = std::thread(&MjpegEncoder::outputThread, this);
+ for (int i = 0; i < NUM_ENC_THREADS; i++)
+ encode_thread_[i] = std::thread(std::bind(&MjpegEncoder::encodeThread, this, i));
+}
+
+MjpegEncoder::~MjpegEncoder()
+{
+ abortEncode_ = true;
+ for (int i = 0; i < NUM_ENC_THREADS; i++)
+ encode_thread_[i].join();
+ abortOutput_ = true;
+ output_thread_.join();
+}
+
+void MjpegEncoder::EncodeBuffer(void *mem, void *dest, unsigned int size,
+ StreamInfo const &info, int64_t timestamp_us,
+ unsigned int cookie)
+{
+ std::lock_guard<std::mutex> lock(encode_mutex_);
+ EncodeItem item = { mem, dest, size, info, timestamp_us, index_++, cookie };
+
+ encode_queue_.push(item);
+ encode_cond_var_.notify_all();
+}
+
+void MjpegEncoder::encodeJPEG(struct jpeg_compress_struct &cinfo, EncodeItem &item,
+ uint8_t *&encoded_buffer, size_t &buffer_len)
+{
+ cinfo.image_width = item.info.width;
+ cinfo.image_height = item.info.height;
+ cinfo.input_components = 3;
+ cinfo.in_color_space = JCS_YCbCr;
+ cinfo.restart_interval = 0;
+
+ jpeg_set_defaults(&cinfo);
+ cinfo.raw_data_in = TRUE;
+ jpeg_set_quality(&cinfo, 50, TRUE);
+
+ jpeg_mem_len_t jpeg_mem_len = buffer_len;
+ jpeg_mem_dest(&cinfo, &encoded_buffer, &jpeg_mem_len);
+ jpeg_start_compress(&cinfo, TRUE);
+
+ int stride2 = item.info.stride / 2;
+ uint8_t *Y = (uint8_t *)item.mem;
+ uint8_t *U = (uint8_t *)Y + item.info.stride * item.info.height;
+ uint8_t *V = (uint8_t *)U + stride2 * (item.info.height / 2);
+ uint8_t *Y_max = U - item.info.stride;
+ uint8_t *U_max = V - stride2;
+ uint8_t *V_max = U_max + stride2 * (item.info.height / 2);
+
+ JSAMPROW y_rows[16];
+ JSAMPROW u_rows[8];
+ JSAMPROW v_rows[8];
+
+ for (uint8_t *Y_row = Y, *U_row = U, *V_row = V; cinfo.next_scanline < item.info.height;)
+ {
+ for (int i = 0; i < 16; i++, Y_row += item.info.stride)
+ y_rows[i] = std::min(Y_row, Y_max);
+ for (int i = 0; i < 8; i++, U_row += stride2, V_row += stride2) {
+ u_rows[i] = std::min(U_row, U_max);
+ v_rows[i] = std::min(V_row, V_max);
+ }
+
+ JSAMPARRAY rows[] = { y_rows, u_rows, v_rows };
+ jpeg_write_raw_data(&cinfo, rows, 16);
+ }
+
+ jpeg_finish_compress(&cinfo);
+
+ buffer_len = jpeg_mem_len;
+}
+
+void MjpegEncoder::encodeThread(int num)
+{
+ struct jpeg_compress_struct cinfo;
+ struct jpeg_error_mgr jerr;
+ EncodeItem encode_item;
+ uint32_t frames = 0;
+
+ cinfo.err = jpeg_std_error(&jerr);
+ jpeg_create_compress(&cinfo);
+
+ while (true)
+ {
+ {
+ std::unique_lock<std::mutex> lock(encode_mutex_);
+ while (true)
+ {
+ using namespace std::chrono_literals;
+ if (abortEncode_ && encode_queue_.empty())
+ {
+ jpeg_destroy_compress(&cinfo);
+ return;
+ }
+ if (!encode_queue_.empty())
+ {
+ encode_item = encode_queue_.front();
+ encode_queue_.pop();
+ break;
+ }
+ else
+ encode_cond_var_.wait_for(lock, 200ms);
+ }
+ }
+
+ uint8_t *encoded_buffer = (uint8_t *)encode_item.dest;
+ size_t buffer_len = encode_item.size;
+
+ encodeJPEG(cinfo, encode_item, encoded_buffer, buffer_len);
+
+ frames++;
+
+ /*
+ * Don't return buffers here; leave that to the output thread, as
+ * that's where they're put back in order.
+ *
+ * We push this encoded buffer to another thread so that our
+ * application can take its time with the data without blocking
+ * the encode process.
+ */
+ OutputItem output_item = {
+ encoded_buffer,
+ buffer_len,
+ encode_item.timestamp_us,
+ encode_item.index,
+ encode_item.cookie
+ };
+ std::lock_guard<std::mutex> lock(output_mutex_);
+ output_queue_[num].push(output_item);
+ output_cond_var_.notify_one();
+ }
+}
+
+void MjpegEncoder::outputThread()
+{
+ OutputItem item;
+ uint64_t index = 0;
+ while (true)
+ {
+ {
+ std::unique_lock<std::mutex> lock(output_mutex_);
+ while (true)
+ {
+ using namespace std::chrono_literals;
+
+ /*
+ * We look for the thread that's completed the
+ * frame we want next. If we don't find it, we
+ * wait.
+ *
+ * Must also check for an abort signal and if
+ * set, all queues must be empty. This is done
+ * first to ensure all frame callbacks have a
+ * chance to run.
+ */
+ bool abort = abortOutput_ ? true : false;
+ for (auto &q : output_queue_)
+ {
+ if (abort && !q.empty())
+ abort = false;
+
+ if (!q.empty() && q.front().index == index)
+ {
+ item = q.front();
+ q.pop();
+ goto got_item;
+ }
+ }
+ if (abort)
+ return;
+
+ output_cond_var_.wait_for(lock, 200ms);
+ }
+ }
+ got_item:
+ output_ready_callback_(item.mem, item.bytes_used, item.timestamp_us, item.cookie);
+ index++;
+ }
+}
+
+StreamInfo MjpegEncoder::getStreamInfo(libcamera::Stream *stream)
+{
+ libcamera::StreamConfiguration const &cfg = stream->configuration();
+ StreamInfo info;
+ info.width = cfg.size.width;
+ info.height = cfg.size.height;
+ info.stride = cfg.stride;
+ info.pixel_format = cfg.pixelFormat;
+ info.colour_space = cfg.colorSpace;
+ return info;
+} \ No newline at end of file
diff --git a/meson.build b/meson.build
index 8101676..a8b54d1 100644
--- a/meson.build
+++ b/meson.build
@@ -54,6 +54,8 @@ summary({ 'Sources': uvc_gadget_git_version, }, section : 'Versions')
cc = meson.get_compiler('c')
libcamera = dependency('libcamera', required : false)
+libjpeg = dependency('libjpeg', required : false)
+threads = dependency('threads', required : false)
conf = configuration_data()
@@ -61,6 +63,10 @@ if libcamera.found()
conf.set('HAVE_LIBCAMERA', true)
endif
+if libjpeg.found() and threads.found()
+ conf.set('CONFIG_CAN_ENCODE', true)
+endif
+
configure_file(output : 'config.h', configuration : conf)
config_includes = include_directories('.')