G-API gst source gray support #21560
Changes from all commits
@@ -28,13 +28,41 @@ GStreamerMediaAdapter::GStreamerMediaAdapter(const cv::GFrameDesc& frameDesc,
     GstVideoMeta* videoMeta = gst_buffer_get_video_meta(m_buffer);
     if (videoMeta != nullptr) {
-        m_strides = { videoMeta->stride[0], videoMeta->stride[1] };
-        m_offsets = { videoMeta->offset[0], videoMeta->offset[1] };
+        switch (m_frameDesc.fmt) {
+            case cv::MediaFormat::NV12: {
+                m_strides = { videoMeta->stride[0], videoMeta->stride[1] };
+                m_offsets = { videoMeta->offset[0], videoMeta->offset[1] };
+                break;
+            }
+            case cv::MediaFormat::GRAY: {
+                m_strides = { videoMeta->stride[0]};
+                m_offsets = { videoMeta->offset[0]};
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+                break;
+            }
+        }
     } else {
-        m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0),
-                      GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 1) };
-        m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0),
-                      GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 1) };
+        switch (m_frameDesc.fmt) {
+            case cv::MediaFormat::NV12: {
+                m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0),
+                              GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 1) };
+                m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0),
+                              GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 1) };
+                break;
+            }
+            case cv::MediaFormat::GRAY: {
+                m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0)};
+                m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0)};
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+                break;
+            }
+        }
     }
 }
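For context on why GRAY records only one stride/offset pair: NV12 is a two-plane format (a full-resolution Y plane followed by a half-resolution interleaved UV plane), whereas GRAY carries a single 8-bit plane. A rough sketch of the buffer layout these strides and offsets describe, assuming tightly packed planes (illustration only, names are hypothetical, not code from this PR):

#include <cstddef>

// NV12: plane 0 (Y) is 'height' rows of stride0 bytes; plane 1 (UV) is height/2
// rows of stride1 bytes and typically starts right after Y, i.e. offset1 = stride0 * height.
std::size_t nv12_buffer_size(std::size_t stride0, std::size_t stride1, std::size_t height) {
    return stride0 * height + stride1 * (height / 2);
}

// GRAY: a single plane of 'height' rows, stride0 bytes each, at offset 0.
std::size_t gray_buffer_size(std::size_t stride0, std::size_t height) {
    return stride0 * height;
}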
@@ -71,8 +99,10 @@ cv::MediaFrame::View GStreamerMediaAdapter::access(cv::MediaFrame::Access access
     if(!m_isMapped.load(std::memory_order_relaxed)) {

-        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 2);
-        GAPI_Assert(GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_NV12);
+        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 2 ||
+                    GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 1);
+        GAPI_Assert(GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_NV12 ||
+                    GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_GRAY8);
Review comment: Maybe these asserts above should be combined, like …; but perhaps it looks too heavy.
         // TODO: Use RAII for map/unmap
         if (access == cv::MediaFrame::Access::W) {
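The snippet the combined-assert comment above refers to ("like …") is not captured on this page. The following is only a guess at what such a combined form could look like, under the assumption that the intent is to tie the expected plane count to the expected format; it is not the reviewer's actual suggestion and not part of the PR:

// Hypothetical combined check instead of two independent "either/or" asserts.
const guint n_planes      = GST_VIDEO_INFO_N_PLANES(m_videoInfo.get());
const GstVideoFormat fmt  = GST_VIDEO_INFO_FORMAT(m_videoInfo.get());
GAPI_Assert((fmt == GST_VIDEO_FORMAT_NV12  && n_planes == 2) ||
            (fmt == GST_VIDEO_FORMAT_GRAY8 && n_planes == 1));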
@@ -85,27 +115,56 @@ cv::MediaFrame::View GStreamerMediaAdapter::access(cv::MediaFrame::Access access
         }

         GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0) == m_strides[0]);
-        GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 1) == m_strides[1]);
         GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 0) == m_offsets[0]);
-        GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 1) == m_offsets[1]);
+        if (m_frameDesc.fmt == cv::MediaFormat::NV12) {
+            GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 1) == m_strides[1]);
+            GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 1) == m_offsets[1]);
+        }

         m_isMapped.store(true, std::memory_order_release);
         }
     }

-    cv::MediaFrame::View::Ptrs ps {
-        static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
-        static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[1], // UV-plane
-        nullptr,
-        nullptr
-    };
-
-    cv::MediaFrame::View::Strides ss = {
-        static_cast<std::size_t>(m_strides[0]), // Y-plane stride
-        static_cast<std::size_t>(m_strides[1]), // UV-plane stride
-        0u,
-        0u
-    };
+    cv::MediaFrame::View::Ptrs ps;
+    cv::MediaFrame::View::Strides ss;
+
+    switch (m_frameDesc.fmt) {
+        case cv::MediaFormat::NV12: {
+            ps = {
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[1], // UV-plane
+                nullptr,
+                nullptr
+            };
+            ss = {
+                static_cast<std::size_t>(m_strides[0]), // Y-plane stride
+                static_cast<std::size_t>(m_strides[1]), // UV-plane stride
+                0u,
+                0u
+            };
+            break;
+        }
+        case cv::MediaFormat::GRAY: {
+            ps = {
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
+                nullptr,
+                nullptr,
+                nullptr
+            };
+            ss = {
+                static_cast<std::size_t>(m_strides[0]), // Y-plane stride
+                0u,
+                0u,
+                0u
+            };
+            break;
+        }
+        default: {
+            GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+            break;
+        }
+    }

     --thread_counters;
     return cv::MediaFrame::View(std::move(ps), std::move(ss));
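The practical effect for consumers is that a GRAY MediaFrame populates only the first pointer/stride slot of the returned View. A minimal consumer sketch, assuming a cv::MediaFrame produced by this source with desc().fmt == cv::MediaFormat::GRAY; the function name and the exact headers are assumptions, not part of the PR:

#include <opencv2/core.hpp>
#include <opencv2/gapi/media.hpp>

// Sketch: copy the single GRAY plane of a MediaFrame into a cv::Mat.
cv::Mat wrap_gray_plane(cv::MediaFrame& frame)
{
    cv::MediaFrame::View view = frame.access(cv::MediaFrame::Access::R);
    const cv::GFrameDesc desc = frame.desc();
    // Only ptr[0]/stride[0] are populated for GRAY; the remaining entries are null/zero.
    return cv::Mat(desc.size, CV_8UC1, view.ptr[0], view.stride[0]).clone();
    // .clone() copies the pixels so the Mat stays valid after 'view' is destroyed.
}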
@@ -30,8 +30,9 @@ namespace gst {

 #ifdef HAVE_GSTREAMER

-constexpr char NV12_CAPS_STRING[] =
-    "video/x-raw,format=NV12;video/x-raw(memory:DMABuf),format=NV12";
+constexpr char ALLOWED_CAPS_STRING[] =
+    "video/x-raw,format=(string){NV12, GRAY8};video/x-raw(memory:DMABuf),format=(string){NV12, GRAY8}";

 namespace {
 GstPadProbeReturn appsinkQueryCallback(GstPad*, GstPadProbeInfo* info, gpointer)
@@ -137,17 +138,17 @@ void GStreamerSource::Priv::configureAppsink() {
     // Do not emit signals: all calls will be synchronous and blocking.
     gst_app_sink_set_emit_signals(GST_APP_SINK(m_appsink.get()), FALSE);

-    GStreamerPtr<GstCaps> nv12Caps(gst_caps_from_string(NV12_CAPS_STRING));
+    GStreamerPtr<GstCaps> gstCaps(gst_caps_from_string(ALLOWED_CAPS_STRING));

     GStreamerPtr<GstPad> appsinkPad(gst_element_get_static_pad(m_appsink, "sink"));
     GStreamerPtr<GstCaps> peerCaps(gst_pad_peer_query_caps(appsinkPad, NULL));
-    if (!gst_caps_can_intersect(peerCaps, nv12Caps)) {
+    if (!gst_caps_can_intersect(peerCaps, gstCaps)) {
         cv::util::throw_error(
-            std::logic_error("appsink element can only consume video-frame in NV12 format in "
+            std::logic_error("appsink element can only consume video-frame in NV12 or GRAY8 format in "
                              "GStreamerSource"));
     }

-    gst_app_sink_set_caps(GST_APP_SINK(m_appsink.get()), nv12Caps);
+    gst_app_sink_set_caps(GST_APP_SINK(m_appsink.get()), gstCaps);

     gst_pad_add_probe(appsinkPad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, appsinkQueryCallback,
                       NULL, NULL);
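The new caps string lists two alternative structures (system memory and DMABuf), each now accepting NV12 or GRAY8. A small standalone sketch of the kind of intersection check configureAppsink performs, with an illustrative GRAY8 upstream caps string (the example caps are assumptions, not taken from the PR):

#include <gst/gst.h>

int main(int argc, char** argv)
{
    gst_init(&argc, &argv);
    // Does a GRAY8-producing upstream intersect with the allowed caps?
    GstCaps* allowed  = gst_caps_from_string(
        "video/x-raw,format=(string){NV12, GRAY8};"
        "video/x-raw(memory:DMABuf),format=(string){NV12, GRAY8}");
    GstCaps* upstream = gst_caps_from_string("video/x-raw,format=GRAY8,width=640,height=480");
    gboolean ok = gst_caps_can_intersect(upstream, allowed);   // TRUE: GRAY8 is accepted now
    g_print("intersects: %d\n", ok);
    gst_caps_unref(upstream);
    gst_caps_unref(allowed);
    return 0;
}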
@@ -184,10 +185,29 @@ void GStreamerSource::Priv::prepareVideoMeta()
             cv::util::throw_error(std::logic_error("Cannot query video width/height."));
         }

+        // Fill GstVideoInfo structure to work further with GstVideoFrame class.
+        if (!gst_video_info_from_caps(&m_videoInfo, prerollCaps)) {
+            cv::util::throw_error(std::logic_error("preroll sample has invalid caps."));
+        }
+        m_type = GST_VIDEO_INFO_FORMAT(&m_videoInfo);
         switch(m_outputType) {
             case GStreamerSource::OutputType::FRAME: {
                 // Construct metadata for media frame.
-                m_mediaFrameMeta = GFrameDesc { cv::MediaFormat::NV12, cv::Size(width, height) };
+                switch (m_type) {
+                    case GST_VIDEO_FORMAT_NV12: {
+                        m_mediaFrameMeta = GFrameDesc{ cv::MediaFormat::NV12, cv::Size(width, height) };
+                        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
+                        break;
+                    }
+                    case GST_VIDEO_FORMAT_GRAY8: {
+                        m_mediaFrameMeta = GFrameDesc{ cv::MediaFormat::GRAY, cv::Size(width, height) };
Review comment: so is it …
Review comment (reply): There are different GStreamer gray formats: GRAY8, GRAY16. MediaFormat::GRAY assumes 8-bit integer values. Should we align our naming approach with GStreamer's? I have some doubts that 16-bit will be asked for by anyone.
Review comment (reply): It seems to be useful for some scenarios, see #18694. Furthermore, there are two variants of this format: GRAY16_LE and GRAY16_BE.
+                        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 1);
+                        break;
+                    }
+                    default: {
+                        GAPI_Assert(false && "Unsupported GStreamerSource FRAME type.");
+                    }
+                }
                 break;
             }
             case GStreamerSource::OutputType::MAT: {
@@ -197,13 +217,6 @@ void GStreamerSource::Priv::prepareVideoMeta()
             }
         }

-        // Fill GstVideoInfo structure to work further with GstVideoFrame class.
-        if (!gst_video_info_from_caps(&m_videoInfo, prerollCaps)) {
-            cv::util::throw_error(std::logic_error("preroll sample has invalid caps."));
-        }
-        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
-        GAPI_Assert(GST_VIDEO_INFO_FORMAT(&m_videoInfo) == GST_VIDEO_FORMAT_NV12);

         m_isMetaPrepared = true;
     }
 }
@@ -272,28 +285,46 @@ bool GStreamerSource::Priv::retrieveFrame(cv::Mat& data)

     try
     {
-        // m_matMeta holds width and height for 8U BGR frame, but actual
-        // frame m_buffer we request from GStreamer pipeline has 8U NV12 format.
-        // Constructing y and uv cv::Mat-s from such a m_buffer:
-        GAPI_Assert((uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 1) ==
-                    (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                    GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1));
-
-        cv::Mat y(m_matMeta.size, CV_8UC1,
-                  (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                  GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
-                  GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
-        cv::Mat uv(m_matMeta.size / 2, CV_8UC2,
-                   (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                   GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1),
-                   GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 1));
-
-        cv::cvtColorTwoPlane(y, uv, data, cv::COLOR_YUV2BGR_NV12);
+        switch (m_type) {
+            case GST_VIDEO_FORMAT_NV12: {
+                // m_matMeta holds width and height for 8U BGR frame, but actual
+                // frame m_buffer we request from GStreamer pipeline has 8U NV12 format.
+                // Constructing y and uv cv::Mat-s from such a m_buffer:
+                GAPI_Assert((uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 1) ==
+                            (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                            GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1));
+                GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
+
+                cv::Mat y(m_matMeta.size, CV_8UC1,
+                          (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                          GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
+                          GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
+                cv::Mat uv(m_matMeta.size / 2, CV_8UC2,
+                           (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                           GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1),
+                           GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 1));
+
+                cv::cvtColorTwoPlane(y, uv, data, cv::COLOR_YUV2BGR_NV12);
+                break;
+            }
+            case GST_VIDEO_FORMAT_GRAY8: {
+                GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 1);
+                cv::Mat y(m_matMeta.size, CV_8UC1,
+                          (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                          GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
+                          GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
+                cv::cvtColor(y, data, cv::COLOR_GRAY2BGR);
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "retrieveFrame - unsupported GStreamerSource FRAME type.");
+            }
+        }
     }
     catch (...)
     {
         gst_video_frame_unmap(&videoFrame);
-        cv::util::throw_error(std::runtime_error("NV12 buffer conversion to BGR is failed!"));
+        cv::util::throw_error(std::runtime_error("NV12 or GRAY8 buffer conversion to BGR is failed!"));
     }
     gst_video_frame_unmap(&videoFrame);
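With these changes, a pipeline that negotiates GRAY8 caps on its appsink can be fed to the source directly. A hedged usage sketch follows; the pipeline string, header path, and namespaces are assumptions based on current G-API conventions, not taken from this PR:

#include <opencv2/gapi/streaming/gstreamer/gstreamersource.hpp>

int main()
{
    // Illustrative only: videotestsrc emits GRAY8 frames, which the appsink
    // can now negotiate thanks to the widened ALLOWED_CAPS_STRING.
    auto src = cv::gapi::wip::make_src<cv::gapi::wip::gst::GStreamerSource>(
        "videotestsrc num-buffers=10 ! video/x-raw,format=GRAY8 ! appsink",
        cv::gapi::wip::gst::GStreamerSource::OutputType::FRAME);
    // 'src' can then be handed to a GStreamingCompiled via setSource() as usual.
    (void)src;
    return 0;
}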