Milestone 5: deliver embedded RDP sessions and lifecycle hardening

This commit is contained in:
Keith Smith
2026-03-03 18:59:26 -07:00
parent 230a401386
commit 36006bd4aa
2941 changed files with 724359 additions and 77 deletions

View File

@@ -0,0 +1,55 @@
# FreeRDP: A Remote Desktop Protocol Implementation
# FreeRDP cmake build script
#
# Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Declare the "rdpecam" dynamic virtual channel client module; sets up the
# MODULE_PREFIX / MODULE_NAME / CHANNEL_NAME variables consumed below.
define_channel_client("rdpecam")
# Both swscale and FFmpeg are hard requirements for this channel.
if(NOT WITH_SWSCALE OR NOT WITH_FFMPEG)
message(FATAL_ERROR "WITH_FFMPEG and WITH_SWSCALE required for CHANNEL_RDPECAM_CLIENT")
endif()
# Record libswscale as a private dependency in the generated pkg-config file.
freerdp_client_pc_add_requires_private("libswscale")
# currently camera redirect client supported for platforms with Video4Linux only
find_package(FFmpeg REQUIRED COMPONENTS SWSCALE)
find_package(V4L)
if(V4L_FOUND)
set(WITH_V4L ON)
add_compile_definitions("WITH_V4L")
else()
message(FATAL_ERROR "libv4l-dev required for CHANNEL_RDPECAM_CLIENT")
endif()
# Optional camera input formats; each enables the matching WITH_* compile define
# that gates code paths in the channel sources.
option(RDPECAM_INPUT_FORMAT_H264 "[MS-RDPECAM] Enable H264 camera format (passthrough)" ON)
if(RDPECAM_INPUT_FORMAT_H264)
add_compile_definitions("WITH_INPUT_FORMAT_H264")
endif()
option(RDPECAM_INPUT_FORMAT_MJPG "[MS-RDPECAM] Enable MJPG camera format" ON)
if(RDPECAM_INPUT_FORMAT_MJPG)
add_compile_definitions("WITH_INPUT_FORMAT_MJPG")
endif()
# NOTE(review): directory-scoped include; kept because the channel build macros
# below consume directory state rather than an explicit target.
include_directories(SYSTEM ${SWSCALE_INCLUDE_DIRS})
set(${MODULE_PREFIX}_SRCS camera_device_enum_main.c camera_device_main.c encoding.c)
set(${MODULE_PREFIX}_LIBS freerdp winpr ${SWSCALE_LIBRARIES} ${FFMPEG_LIBRARIES})
# TRUE => dynamic channel; "DVCPluginEntry" is the exported entry point symbol.
add_channel_client_library(${MODULE_PREFIX} ${MODULE_NAME} ${CHANNEL_NAME} TRUE "DVCPluginEntry")
if(V4L_FOUND)
add_channel_client_subsystem(${MODULE_PREFIX} ${CHANNEL_NAME} "v4l" "")
endif()

View File

@@ -0,0 +1,295 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, main header file
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FREERDP_CLIENT_CAMERA_H
#define FREERDP_CLIENT_CAMERA_H
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#if defined(WITH_INPUT_FORMAT_MJPG)
#include <libavcodec/avcodec.h>
#endif
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <winpr/wlog.h>
#include <winpr/wtypes.h>
#include <freerdp/api.h>
#include <freerdp/types.h>
#include <freerdp/client/channels.h>
#include <freerdp/channels/log.h>
#include <freerdp/channels/rdpecam.h>
#include <freerdp/codecs.h>
#include <freerdp/primitives.h>
/* Highest MS-RDPECAM protocol version implemented by this client */
#define ECAM_PROTO_VERSION 0x02
/* currently supporting 1 stream per device */
#define ECAM_DEVICE_MAX_STREAMS 1
/* Upper bound on media type descriptors exchanged with the HAL */
#define ECAM_MAX_MEDIA_TYPE_DESCRIPTORS 256
/* Allow to send up to that many unsolicited samples.
 * For example, to support 30 fps with 250 ms round trip
 * ECAM_MAX_SAMPLE_CREDITS has to be at least 8.
 */
#define ECAM_MAX_SAMPLE_CREDITS 8
/* Having this hardcoded allows to preallocate and reuse buffer
 * for sample responses. Excessive size is to make sure any sample
 * will fit in, even with highest resolution.
 */
#define ECAM_SAMPLE_RESPONSE_BUFFER_SIZE (1024ULL * 4050ULL)
/* Special format addition for CAM_MEDIA_FORMAT enum formats
 * used to support H264 stream muxed in MJPG container stream.
 * The value picked not to overlap with enum values
 */
#define CAM_MEDIA_FORMAT_MJPG_H264 0x0401
typedef struct s_ICamHal ICamHal;

/** Top-level plugin state for the rdpecam device enumeration channel.
 *  The IWTSPlugin* handed to the channel manager is cast back to this
 *  struct, so the embedded iface member carries the plugin identity. */
typedef struct
{
	IWTSPlugin iface;                     /* DVC plugin interface (cast target) */
	IWTSListener* listener;               /* enumeration channel listener */
	GENERIC_LISTENER_CALLBACK* hlistener; /* listener callback wrapper, owned */
	/* HAL interface */
	ICamHal* ihal;
	char* subsystem;  /* HAL subsystem name, e.g. "v4l" (string literal, not freed) */
	BOOL initialized; /* set once the DVC listener was created successfully */
	BOOL attached;
	UINT32 version;   /* negotiated protocol version (starts at ECAM_PROTO_VERSION) */
	wHashTable* devices; /* deviceId (string) -> CameraDevice* */
} CameraPlugin;
/** Pairs a camera-side capture format with the network-side wire format */
typedef struct
{
	CAM_MEDIA_FORMAT inputFormat;  /* camera side */
	CAM_MEDIA_FORMAT outputFormat; /* network side */
} CAM_MEDIA_FORMAT_INFO;

/** Per-stream state: sample buffering, credits and encoder contexts */
typedef struct
{
	BOOL streaming;                        /* TRUE while the stream is running */
	CAM_MEDIA_FORMAT_INFO formats;         /* negotiated input/output formats */
	CAM_MEDIA_TYPE_DESCRIPTION currMediaType; /* negotiated media type */
	GENERIC_CHANNEL_CALLBACK* hSampleReqChannel; /* channel for sample responses */

	CRITICAL_SECTION lock;          /* guards pendingSample/haveSample/credits */
	volatile LONG samplesRequested; /* outstanding server sample credits */
	wStream* pendingSample;         /* latest captured sample awaiting send */
	volatile BOOL haveSample;       /* TRUE when pendingSample holds valid data */
	wStream* sampleRespBuffer;      /* preallocated, reused response buffer */

	H264_CONTEXT* h264; /* encoder context when output is H264 */
#if defined(WITH_INPUT_FORMAT_MJPG)
	AVCodecContext* avContext; /* MJPG decoder state (FFmpeg) */
	AVPacket* avInputPkt;
	AVFrame* avOutFrame;
#endif
#if defined(WITH_INPUT_FORMAT_H264)
	size_t h264FrameMaxSize; /* capacity of h264Frame below */
	BYTE* h264Frame;         /* scratch buffer for H264 extraction */
#endif
	/* sws_scale */
	uint32_t swsWidth;  /* dimensions the sws context was created for */
	uint32_t swsHeight;
	struct SwsContext* sws;
} CameraDeviceStream;
WINPR_ATTR_NODISCARD
/** @return the camera-side (capture) media format of \b stream */
static inline CAM_MEDIA_FORMAT streamInputFormat(CameraDeviceStream* stream)
{
	/* consistency: assert pointer parameters like the rest of the module */
	WINPR_ASSERT(stream);
	return stream->formats.inputFormat;
}
WINPR_ATTR_NODISCARD
/** @return the network-side (wire) media format of \b stream */
static inline CAM_MEDIA_FORMAT streamOutputFormat(CameraDeviceStream* stream)
{
	/* consistency: assert pointer parameters like the rest of the module */
	WINPR_ASSERT(stream);
	return stream->formats.outputFormat;
}
/** Per-device state: each camera gets its own DVC listener and streams */
typedef struct
{
	IWTSListener* listener;               /* per-device channel listener */
	GENERIC_LISTENER_CALLBACK* hlistener; /* listener callback wrapper */
	CameraPlugin* ecam;                   /* back pointer to owning plugin */
	ICamHal* ihal; /* HAL interface, same as used by CameraPlugin */
	/* device id; also used as the per-device DVC channel name
	 * (see ecam_send_device_added_notification). Fixed-size buffer. */
	char deviceId[32];
	CameraDeviceStream streams[ECAM_DEVICE_MAX_STREAMS];
} CameraDevice;
/**
 * Subsystem (Hardware Abstraction Layer, HAL) Interface
 */

/** Invoked once per camera found during enumeration */
typedef UINT (*ICamHalEnumCallback)(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
                                    const char* deviceId, const char* deviceName);

/* may run in context of different thread */
typedef UINT (*ICamHalSampleCapturedCallback)(CameraDevice* dev, size_t streamIndex,
                                              const BYTE* sample, size_t size);

/** @brief interface to implement for the camera HAL*/
struct s_ICamHal
{
	/** callback to enumerate available camera calling callback for each found item
	 *
	 * @param ihal the hal interface
	 * @param callback the enum callback
	 * @param ecam the camera plugin
	 * @param hchannel the generic freerdp channel
	 * @return the number of found cameras
	 */
	WINPR_ATTR_NODISCARD UINT (*Enumerate)(ICamHal* ihal, ICamHalEnumCallback callback,
	                                       CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel);

	/**
	 * callback to activate a given camera device
	 * @param ihal the hal interface
	 * @param deviceId the name of the device
	 * @param errorCode a pointer to an error code set if the call failed
	 * @return if the operation was successful
	 * @since 3.18.0
	 */
	WINPR_ATTR_NODISCARD BOOL (*Activate)(ICamHal* ihal, const char* deviceId,
	                                      CAM_ERROR_CODE* errorCode);

	/**
	 * callback to deactivate a given camera device
	 * @param ihal the hal interface
	 * @param deviceId the name of the device
	 * @param errorCode a pointer to an error code set if the call failed
	 * @return if the operation was successful
	 * @since 3.18.0
	 */
	WINPR_ATTR_NODISCARD BOOL (*Deactivate)(ICamHal* ihal, const char* deviceId,
	                                        CAM_ERROR_CODE* errorCode);

	/**
	 * callback that returns the list of compatible media types given a set of supported formats
	 * @param ihal the hal interface
	 * @param deviceId the name of the device
	 * @param streamIndex stream index number
	 * @param supportedFormats a pointer to supported formats
	 * @param nSupportedFormats number of supported formats
	 * @param mediaTypes resulting media type descriptors
	 * @param nMediaTypes output number of media descriptors
	 * @return number of matched supported formats
	 */
	WINPR_ATTR_NODISCARD INT16 (*GetMediaTypeDescriptions)(
	    ICamHal* ihal, const char* deviceId, size_t streamIndex,
	    const CAM_MEDIA_FORMAT_INFO* supportedFormats, size_t nSupportedFormats,
	    CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes, size_t* nMediaTypes);

	/**
	 * callback to start a stream
	 * @param ihal the hal interface
	 * @param dev the camera device that owns the stream
	 * @param streamIndex stream index number
	 * @param mediaType media type description to capture with
	 * @param callback invoked for each captured sample (may run on a different thread)
	 * @return \b CAM_ERROR_CODE_None on success, a CAM_Error otherwise
	 */
	WINPR_ATTR_NODISCARD CAM_ERROR_CODE (*StartStream)(ICamHal* ihal, CameraDevice* dev,
	                                                   size_t streamIndex,
	                                                   const CAM_MEDIA_TYPE_DESCRIPTION* mediaType,
	                                                   ICamHalSampleCapturedCallback callback);

	/**
	 * callback to stop a stream
	 * @param ihal the hal interface
	 * @param deviceId the name of the device
	 * @param streamIndex stream index number
	 * @return \b CAM_ERROR_CODE_None on success, a CAM_Error otherwise
	 */
	CAM_ERROR_CODE (*StopStream)(ICamHal* ihal, const char* deviceId, size_t streamIndex);

	/**
	 * callback to free the ICamHal
	 * @param hal the hal interface
	 * @return \b CAM_ERROR_CODE_None on success, a CAM_Error otherwise
	 */
	CAM_ERROR_CODE (*Free)(ICamHal* ihal);
};
/** Registers a HAL implementation with the plugin (see ecam_register_hal_plugin) */
typedef UINT (*PREGISTERCAMERAHAL)(IWTSPlugin* plugin, ICamHal* hal);

/** Entry points handed to a HAL addin when it is loaded */
typedef struct
{
	IWTSPlugin* plugin;                   /* the rdpecam plugin instance */
	WINPR_ATTR_NODISCARD PREGISTERCAMERAHAL pRegisterCameraHal; /* registration hook */
	CameraPlugin* ecam;                   /* same object as plugin, concrete type */
	const ADDIN_ARGV* args;               /* addin arguments (may be NULL) */
} FREERDP_CAMERA_HAL_ENTRY_POINTS;

typedef FREERDP_CAMERA_HAL_ENTRY_POINTS* PFREERDP_CAMERA_HAL_ENTRY_POINTS;

/* entry point called by addin manager */
typedef UINT(VCAPITYPE* PFREERDP_CAMERA_HAL_ENTRY)(PFREERDP_CAMERA_HAL_ENTRY_POINTS pEntryPoints);

/* common functions */

/** Send a header-only message (version + message id) on the given channel */
WINPR_ATTR_NODISCARD FREERDP_LOCAL UINT ecam_channel_send_generic_msg(
    CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel, CAM_MSG_ID msg);

/** Send a CAM_MSG_ID_ErrorResponse carrying \b code */
FREERDP_LOCAL UINT ecam_channel_send_error_response(CameraPlugin* ecam,
                                                    GENERIC_CHANNEL_CALLBACK* hchannel,
                                                    CAM_ERROR_CODE code);

/** Seal \b out and write it to the channel; frees it when \b freeStream is TRUE */
WINPR_ATTR_NODISCARD FREERDP_LOCAL UINT ecam_channel_write(CameraPlugin* ecam,
                                                           GENERIC_CHANNEL_CALLBACK* hchannel,
                                                           CAM_MSG_ID msg, wStream* out,
                                                           BOOL freeStream);

/* ecam device interface */
FREERDP_LOCAL void ecam_dev_destroy(CameraDevice* dev);

WINPR_ATTR_MALLOC(ecam_dev_destroy, 1)
WINPR_ATTR_NODISCARD
FREERDP_LOCAL CameraDevice* ecam_dev_create(CameraPlugin* ecam, const char* deviceId,
                                            const char* deviceName);

/* video encoding interface */
WINPR_ATTR_NODISCARD FREERDP_LOCAL BOOL ecam_encoder_context_init(CameraDeviceStream* stream);
FREERDP_LOCAL BOOL ecam_encoder_context_free(CameraDeviceStream* stream);
WINPR_ATTR_NODISCARD FREERDP_LOCAL BOOL ecam_encoder_compress(CameraDeviceStream* stream,
                                                              const BYTE* srcData, size_t srcSize,
                                                              BYTE** ppDstData, size_t* pDstSize);
WINPR_ATTR_NODISCARD FREERDP_LOCAL UINT32 h264_get_max_bitrate(UINT32 height);

#endif /* FREERDP_CLIENT_CAMERA_H */

View File

@@ -0,0 +1,567 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Device Enumeration Channel
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include <winpr/cast.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-enum.client")
/**
 * Send a CAM_MSG_ID_ErrorResponse message carrying the given error code.
 *
 * @param ecam     plugin instance (provides the protocol version)
 * @param hchannel channel to write to
 * @param code     error code to report to the server
 *
 * @return 0 on success, otherwise a Win32 error code
 */
UINT ecam_channel_send_error_response(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
                                      CAM_ERROR_CODE code)
{
	WINPR_ASSERT(ecam);

	const CAM_MSG_ID msgId = CAM_MSG_ID_ErrorResponse;

	/* header (version + message id) followed by the 32 bit error code */
	wStream* out = Stream_New(nullptr, CAM_HEADER_SIZE + 4);
	if (out == nullptr)
	{
		WLog_ERR(TAG, "Stream_New failed!");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	Stream_Write_UINT8(out, WINPR_ASSERTING_INT_CAST(uint8_t, ecam->version));
	Stream_Write_UINT8(out, WINPR_ASSERTING_INT_CAST(uint8_t, msgId));
	Stream_Write_UINT32(out, code);

	/* ecam_channel_write takes ownership and frees the stream */
	return ecam_channel_write(ecam, hchannel, msgId, out, TRUE);
}
/**
 * Send a header-only message (protocol version + message id) on the channel.
 *
 * @param ecam     plugin instance (provides the protocol version)
 * @param hchannel channel to write to
 * @param msg      message id to send
 *
 * @return 0 on success, otherwise a Win32 error code
 */
UINT ecam_channel_send_generic_msg(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
                                   CAM_MSG_ID msg)
{
	WINPR_ASSERT(ecam);

	wStream* out = Stream_New(nullptr, CAM_HEADER_SIZE);
	if (out == nullptr)
	{
		WLog_ERR(TAG, "Stream_New failed!");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	Stream_Write_UINT8(out, WINPR_ASSERTING_INT_CAST(uint8_t, ecam->version));
	Stream_Write_UINT8(out, WINPR_ASSERTING_INT_CAST(uint8_t, msg));

	/* stream ownership passes to ecam_channel_write (freed there) */
	return ecam_channel_write(ecam, hchannel, msg, out, TRUE);
}
/**
 * Seal and write a message stream to the virtual channel.
 *
 * @param ecam       unused (kept for interface symmetry)
 * @param hchannel   channel to write to
 * @param msg        message id, used for logging only
 * @param out        stream holding the complete message
 * @param freeStream when TRUE the stream is freed after the write
 *
 * @return 0 on success, otherwise a Win32 error code
 */
UINT ecam_channel_write(WINPR_ATTR_UNUSED CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
                        CAM_MSG_ID msg, wStream* out, BOOL freeStream)
{
	if (!hchannel || !out)
		return ERROR_INVALID_PARAMETER;

	Stream_SealLength(out);
	const size_t length = Stream_Length(out);
	WINPR_ASSERT(length <= UINT32_MAX);

	WLog_DBG(TAG, "ChannelId=%" PRIu32 ", MessageId=0x%02" PRIx8 ", Length=%" PRIuz,
	         hchannel->channel_mgr->GetChannelId(hchannel->channel), msg, length);

	const UINT status =
	    hchannel->channel->Write(hchannel->channel, (ULONG)length, Stream_Buffer(out), nullptr);

	if (freeStream)
		Stream_Free(out, TRUE);

	return status;
}
/**
 * Send a CAM_MSG_ID_DeviceAddedNotification announcing a camera to the server.
 * The payload is the UTF-16 device name (with terminator) followed by the
 * ANSI channel name (with terminator).
 *
 * @param ecam        plugin instance (provides the protocol version)
 * @param hchannel    enumeration channel to write to
 * @param deviceName  human readable device name
 * @param channelName per-device DVC channel name (the device id)
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_send_device_added_notification(CameraPlugin* ecam,
                                                GENERIC_CHANNEL_CALLBACK* hchannel,
                                                const char* deviceName, const char* channelName)
{
	CAM_MSG_ID msg = CAM_MSG_ID_DeviceAddedNotification;
	WINPR_ASSERT(ecam);
	WINPR_ASSERT(deviceName);
	WINPR_ASSERT(channelName);

	const size_t devNameLen = strlen(deviceName);
	const size_t channelNameSize = strlen(channelName) + 1; /* including NUL */

	/* BUGFIX: size the stream for the actual payload instead of a fixed 256
	 * bytes, which could overflow for long device/channel names:
	 * header + UTF-16 name (incl. terminator) + ANSI channel name. */
	const size_t required = CAM_HEADER_SIZE + (devNameLen + 1) * sizeof(WCHAR) + channelNameSize;
	wStream* s = Stream_New(nullptr, required);
	if (!s)
	{
		WLog_ERR(TAG, "Stream_New failed!");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	Stream_Write_UINT8(s, WINPR_ASSERTING_INT_CAST(uint8_t, ecam->version));
	Stream_Write_UINT8(s, WINPR_ASSERTING_INT_CAST(uint8_t, msg));

	/* write name as UTF-16 including the NUL terminator */
	if (Stream_Write_UTF16_String_From_UTF8(s, devNameLen + 1, deviceName, devNameLen, TRUE) < 0)
	{
		Stream_Free(s, TRUE);
		return ERROR_INTERNAL_ERROR;
	}

	Stream_Write(s, channelName, channelNameSize);

	return ecam_channel_write(ecam, hchannel, msg, s, TRUE);
}
/**
 * HAL enumeration callback: registers a newly discovered camera device and
 * notifies the server about it.
 *
 * @param ecam       plugin instance owning the device table
 * @param hchannel   enumeration channel used for the notification
 * @param deviceId   unique device id (also used as per-device channel name)
 * @param deviceName human readable device name
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_ihal_device_added_callback(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
                                            const char* deviceId, const char* deviceName)
{
	WLog_DBG(TAG, "deviceId=%s, deviceName=%s", deviceId, deviceName);

	if (!HashTable_ContainsKey(ecam->devices, deviceId))
	{
		CameraDevice* dev = ecam_dev_create(ecam, deviceId, deviceName);

		/* BUGFIX: ecam_dev_create can fail; don't insert a NULL device */
		if (!dev)
			return ERROR_INTERNAL_ERROR;

		if (!HashTable_Insert(ecam->devices, deviceId, dev))
		{
			ecam_dev_destroy(dev);
			return ERROR_INTERNAL_ERROR;
		}
	}
	else
	{
		WLog_DBG(TAG, "Device %s already exists", deviceId);
	}

	/* best effort: enumeration continues even if the notification fails */
	ecam_send_device_added_notification(ecam, hchannel, deviceName, deviceId /*channelName*/);
	return CHANNEL_RC_OK;
}
/**
 * Ask the registered HAL to enumerate cameras; each discovered device is
 * reported through ecam_ihal_device_added_callback.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_enumerate_devices(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel)
{
	ICamHal* hal = ecam->ihal;
	return hal->Enumerate(hal, ecam_ihal_device_added_callback, ecam, hchannel);
}
/**
 * Handle the server's SelectVersionResponse: adopt the negotiated protocol
 * version and kick off device enumeration through the HAL.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_process_select_version_response(CameraPlugin* ecam,
                                                 GENERIC_CHANNEL_CALLBACK* hchannel,
                                                 WINPR_ATTR_UNUSED wStream* s, BYTE serverVersion)
{
	const BYTE clientVersion = ECAM_PROTO_VERSION;

	/* check remaining s capacity */

	WLog_DBG(TAG, "ServerVersion=%" PRIu8 ", ClientVersion=%" PRIu8, serverVersion, clientVersion);

	/* a server newer than us is rejected, but the channel stays up */
	if (serverVersion > clientVersion)
	{
		WLog_ERR(TAG,
		         "Incompatible protocol version server=%" PRIu8 ", client supports version=%" PRIu8,
		         serverVersion, clientVersion);
		return CHANNEL_RC_OK;
	}

	ecam->version = serverVersion;

	if (ecam->ihal == nullptr)
	{
		WLog_ERR(TAG, "No HAL registered");
		return CHANNEL_RC_OK;
	}

	/* enumeration failures are not propagated here (best effort) */
	ecam_enumerate_devices(ecam, hchannel);
	return CHANNEL_RC_OK;
}
/**
 * Channel data handler for the enumeration channel: parses the common header
 * and dispatches on the message id.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_on_data_received(IWTSVirtualChannelCallback* pChannelCallback, wStream* data)
{
	GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
	if (!hchannel || !data)
		return ERROR_INVALID_PARAMETER;

	CameraPlugin* ecam = (CameraPlugin*)hchannel->plugin;
	if (!ecam)
		return ERROR_INTERNAL_ERROR;

	if (!Stream_CheckAndLogRequiredCapacity(TAG, data, CAM_HEADER_SIZE))
		return ERROR_NO_DATA;

	BYTE version = 0;
	BYTE messageId = 0;
	Stream_Read_UINT8(data, version);
	Stream_Read_UINT8(data, messageId);

	WLog_DBG(TAG, "ChannelId=%" PRIu32 ", MessageId=0x%02" PRIx8 ", Version=%d",
	         hchannel->channel_mgr->GetChannelId(hchannel->channel), messageId, version);

	/* only SelectVersionResponse is expected on the enumeration channel */
	if (messageId == CAM_MSG_ID_SelectVersionResponse)
		return ecam_process_select_version_response(ecam, hchannel, data, version);

	WLog_WARN(TAG, "unknown MessageId=0x%02" PRIx8 "", messageId);
	ecam_channel_send_error_response(ecam, hchannel, CAM_ERROR_CODE_OperationNotSupported);
	return ERROR_INVALID_DATA;
}
/**
 * Channel open handler: starts protocol negotiation by sending
 * SelectVersionRequest to the server.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_on_open(IWTSVirtualChannelCallback* pChannelCallback)
{
	GENERIC_CHANNEL_CALLBACK* cb = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
	WINPR_ASSERT(cb);

	CameraPlugin* plugin = (CameraPlugin*)cb->plugin;
	WINPR_ASSERT(plugin);

	WLog_DBG(TAG, "entered");
	return ecam_channel_send_generic_msg(plugin, cb, CAM_MSG_ID_SelectVersionRequest);
}
/**
 * Channel close handler: releases the per-channel callback allocated in
 * ecam_on_new_channel_connection.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_on_close(IWTSVirtualChannelCallback* pChannelCallback)
{
	GENERIC_CHANNEL_CALLBACK* cb = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
	WINPR_ASSERT(cb);

	WLog_DBG(TAG, "entered");

	free(cb);
	return CHANNEL_RC_OK;
}
/**
 * Listener callback: a new enumeration channel instance was connected by the
 * server. Allocates and wires up the per-channel callback (freed in
 * ecam_on_close).
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_on_new_channel_connection(IWTSListenerCallback* pListenerCallback,
                                           IWTSVirtualChannel* pChannel,
                                           WINPR_ATTR_UNUSED BYTE* Data,
                                           WINPR_ATTR_UNUSED BOOL* pbAccept,
                                           IWTSVirtualChannelCallback** ppCallback)
{
	GENERIC_LISTENER_CALLBACK* listenerCb = (GENERIC_LISTENER_CALLBACK*)pListenerCallback;
	if (!listenerCb || !listenerCb->plugin)
		return ERROR_INTERNAL_ERROR;

	WLog_DBG(TAG, "entered");

	GENERIC_CHANNEL_CALLBACK* channelCb =
	    (GENERIC_CHANNEL_CALLBACK*)calloc(1, sizeof(GENERIC_CHANNEL_CALLBACK));
	if (channelCb == nullptr)
	{
		WLog_ERR(TAG, "calloc failed!");
		return CHANNEL_RC_NO_MEMORY;
	}

	channelCb->iface.OnDataReceived = ecam_on_data_received;
	channelCb->iface.OnOpen = ecam_on_open;
	channelCb->iface.OnClose = ecam_on_close;
	channelCb->plugin = listenerCb->plugin;
	channelCb->channel_mgr = listenerCb->channel_mgr;
	channelCb->channel = pChannel;

	*ppCallback = (IWTSVirtualChannelCallback*)channelCb;
	return CHANNEL_RC_OK;
}
static void ecam_dev_destroy_pv(void* obj)
{
CameraDevice* dev = obj;
ecam_dev_destroy(dev);
}
/**
 * Plugin initialization: creates the deviceId -> CameraDevice hash table and
 * registers the DVC listener for the rdpecam control channel. Guarded against
 * being called twice.
 *
 * @param pPlugin     the plugin (actually a CameraPlugin*)
 * @param pChannelMgr channel manager used to create the listener
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_plugin_initialize(IWTSPlugin* pPlugin, IWTSVirtualChannelManager* pChannelMgr)
{
	CameraPlugin* ecam = (CameraPlugin*)pPlugin;

	WLog_DBG(TAG, "entered");

	if (!ecam || !pChannelMgr)
		return ERROR_INVALID_PARAMETER;

	if (ecam->initialized)
	{
		WLog_ERR(TAG, "[%s] plugin initialized twice, aborting", RDPECAM_CONTROL_DVC_CHANNEL_NAME);
		return ERROR_INVALID_DATA;
	}

	/* start at the client's max version; may be lowered during negotiation */
	ecam->version = ECAM_PROTO_VERSION;

	ecam->devices = HashTable_New(FALSE);
	if (!ecam->devices)
	{
		WLog_ERR(TAG, "HashTable_New failed!");
		return CHANNEL_RC_NO_MEMORY;
	}

	/* string-keyed table; values are CameraDevice* freed via ecam_dev_destroy_pv */
	HashTable_SetupForStringData(ecam->devices, FALSE);
	wObject* obj = HashTable_ValueObject(ecam->devices);
	WINPR_ASSERT(obj);
	obj->fnObjectFree = ecam_dev_destroy_pv;

	/* listener callback is owned by the plugin; freed in ecam_plugin_terminated */
	ecam->hlistener = (GENERIC_LISTENER_CALLBACK*)calloc(1, sizeof(GENERIC_LISTENER_CALLBACK));
	if (!ecam->hlistener)
	{
		WLog_ERR(TAG, "calloc failed!");
		return CHANNEL_RC_NO_MEMORY;
	}

	ecam->hlistener->iface.OnNewChannelConnection = ecam_on_new_channel_connection;
	ecam->hlistener->plugin = pPlugin;
	ecam->hlistener->channel_mgr = pChannelMgr;

	const UINT rc = pChannelMgr->CreateListener(pChannelMgr, RDPECAM_CONTROL_DVC_CHANNEL_NAME, 0,
	                                            &ecam->hlistener->iface, &ecam->listener);
	/* only mark initialized on success so a retry is possible */
	ecam->initialized = (rc == CHANNEL_RC_OK);
	return rc;
}
/**
 * Plugin teardown: destroys the listener, frees all tracked devices, releases
 * the HAL and finally the plugin object itself.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_plugin_terminated(IWTSPlugin* pPlugin)
{
	CameraPlugin* ecam = (CameraPlugin*)pPlugin;
	if (!ecam)
		return ERROR_INVALID_DATA;

	WLog_DBG(TAG, "entered");

	GENERIC_LISTENER_CALLBACK* hlistener = ecam->hlistener;
	if (hlistener && hlistener->channel_mgr)
		IFCALL(hlistener->channel_mgr->DestroyListener, hlistener->channel_mgr, ecam->listener);
	free(hlistener);

	/* frees every CameraDevice via the table's fnObjectFree */
	HashTable_Free(ecam->devices);

	UINT rc = CHANNEL_RC_OK;
	if (ecam->ihal)
		rc = ecam->ihal->Free(ecam->ihal);

	free(ecam);
	return rc;
}
/**
 * Mark the plugin as attached to a session.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_plugin_attached(IWTSPlugin* pPlugin)
{
	CameraPlugin* ecam = (CameraPlugin*)pPlugin;
	if (!ecam)
		return ERROR_INVALID_PARAMETER;

	ecam->attached = TRUE;
	return CHANNEL_RC_OK;
}
/**
 * Mark the plugin as detached from the session.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_plugin_detached(IWTSPlugin* pPlugin)
{
	CameraPlugin* ecam = (CameraPlugin*)pPlugin;
	if (!ecam)
		return ERROR_INVALID_PARAMETER;

	ecam->attached = FALSE;
	return CHANNEL_RC_OK;
}
/**
 * Registration hook handed to HAL addins: stores the HAL interface on the
 * plugin. Only one HAL may be registered.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_register_hal_plugin(IWTSPlugin* pPlugin, ICamHal* ihal)
{
	CameraPlugin* ecam = (CameraPlugin*)pPlugin;
	WINPR_ASSERT(ecam);

	if (ecam->ihal != nullptr)
	{
		WLog_DBG(TAG, "already registered");
		return ERROR_ALREADY_EXISTS;
	}

	WLog_DBG(TAG, "HAL registered");
	ecam->ihal = ihal;
	return CHANNEL_RC_OK;
}
/**
 * Load a camera HAL addin (e.g. "v4l") and invoke its entry point, handing it
 * the registration hook ecam_register_hal_plugin.
 *
 * @param ecam the plugin instance
 * @param name HAL subsystem name used to locate the addin
 * @param args optional addin arguments (may be NULL)
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_load_hal_plugin(CameraPlugin* ecam, const char* name, const ADDIN_ARGV* args)
{
	WINPR_ASSERT(ecam);

	FREERDP_CAMERA_HAL_ENTRY_POINTS entryPoints = WINPR_C_ARRAY_INIT;
	UINT error = ERROR_INTERNAL_ERROR;
	/* union converts the generic addin entry pointer into the camera HAL
	 * entry signature without a direct function-pointer cast */
	union
	{
		PVIRTUALCHANNELENTRY pvce;
		const PFREERDP_CAMERA_HAL_ENTRY entry;
	} cnv;
	cnv.pvce = freerdp_load_channel_addin_entry(RDPECAM_CHANNEL_NAME, name, nullptr, 0);

	if (cnv.entry == nullptr)
	{
		WLog_ERR(TAG,
		         "freerdp_load_channel_addin_entry did not return any function pointers for %s ",
		         name);
		return ERROR_INVALID_FUNCTION;
	}

	entryPoints.plugin = &ecam->iface;
	entryPoints.pRegisterCameraHal = ecam_register_hal_plugin;
	entryPoints.args = args;
	entryPoints.ecam = ecam;

	/* the entry point is expected to call pRegisterCameraHal on success */
	error = cnv.entry(&entryPoints);
	if (error)
	{
		WLog_ERR(TAG, "%s entry returned error %" PRIu32 ".", name, error);
		return error;
	}

	WLog_INFO(TAG, "Loaded %s HAL for ecam", name);
	return CHANNEL_RC_OK;
}
/**
 * Addin manager entry point: allocates the CameraPlugin, loads the platform
 * HAL subsystem and registers the plugin with the channel manager. On any
 * failure the partially constructed plugin is torn down via
 * ecam_plugin_terminated (which also frees ecam itself).
 *
 * @return 0 on success, otherwise a Win32 error code
 */
FREERDP_ENTRY_POINT(UINT VCAPITYPE rdpecam_DVCPluginEntry(IDRDYNVC_ENTRY_POINTS* pEntryPoints))
{
	UINT error = CHANNEL_RC_INITIALIZATION_ERROR;

	WINPR_ASSERT(pEntryPoints);
	WINPR_ASSERT(pEntryPoints->GetPlugin);

	/* idempotent: a second entry call finds the already registered plugin */
	CameraPlugin* ecam = (CameraPlugin*)pEntryPoints->GetPlugin(pEntryPoints, RDPECAM_CHANNEL_NAME);

	if (ecam != nullptr)
		return CHANNEL_RC_ALREADY_INITIALIZED;

	ecam = (CameraPlugin*)calloc(1, sizeof(CameraPlugin));
	if (!ecam)
	{
		WLog_ERR(TAG, "calloc failed!");
		return CHANNEL_RC_NO_MEMORY;
	}

	ecam->attached = TRUE;
	ecam->iface.Initialize = ecam_plugin_initialize;
	ecam->iface.Connected = nullptr; /* server connects to client */
	ecam->iface.Disconnected = nullptr;
	ecam->iface.Terminated = ecam_plugin_terminated;
	ecam->iface.Attached = ecam_plugin_attached;
	ecam->iface.Detached = ecam_plugin_detached;

/* TODO: camera redirect only supported for platforms with Video4Linux */
#if defined(WITH_V4L)
	/* string literal: intentionally never freed (see ecam_plugin_terminated) */
	ecam->subsystem = "v4l";
#else
	ecam->subsystem = nullptr;
#endif

	if (ecam->subsystem)
	{
		if ((error = ecam_load_hal_plugin(ecam, ecam->subsystem, nullptr /*args*/)))
		{
			WLog_ERR(TAG,
			         "Unable to load camera redirection subsystem %s because of error %" PRIu32 "",
			         ecam->subsystem, error);
			goto out;
		}
	}

	error = pEntryPoints->RegisterPlugin(pEntryPoints, RDPECAM_CHANNEL_NAME, &ecam->iface);
	if (error == CHANNEL_RC_OK)
		return error;

out:
	/* frees ecam and anything it acquired (HAL, listener, devices) */
	ecam_plugin_terminated(&ecam->iface);
	return error;
}

View File

@@ -0,0 +1,926 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Device Channels
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include <winpr/cast.h>
#include <winpr/interlocked.h>
#include "camera.h"
#include "rdpecam-utils.h"
#define TAG CHANNELS_TAG("rdpecam-device.client")
/* supported formats in preference order:
 * H264, MJPG, I420 (used as input for H264 encoder), other YUV based, RGB based
 *
 * Each entry maps a camera-side input format to the network-side output
 * format; equal input/output means passthrough, otherwise the sample is
 * transcoded (see ecam_encoder_compress).
 */
static const CAM_MEDIA_FORMAT_INFO supportedFormats[] = {
/* inputFormat, outputFormat */
#if defined(WITH_INPUT_FORMAT_H264)
	{ CAM_MEDIA_FORMAT_H264, CAM_MEDIA_FORMAT_H264 }, /* passthrough */
	{ CAM_MEDIA_FORMAT_MJPG_H264, CAM_MEDIA_FORMAT_H264 },
#endif
#if defined(WITH_INPUT_FORMAT_MJPG)
	{ CAM_MEDIA_FORMAT_MJPG, CAM_MEDIA_FORMAT_H264 },
	{ CAM_MEDIA_FORMAT_MJPG, CAM_MEDIA_FORMAT_MJPG },
#endif
	{ CAM_MEDIA_FORMAT_I420, CAM_MEDIA_FORMAT_H264 },
	{ CAM_MEDIA_FORMAT_YUY2, CAM_MEDIA_FORMAT_H264 },
	{ CAM_MEDIA_FORMAT_NV12, CAM_MEDIA_FORMAT_H264 },
	{ CAM_MEDIA_FORMAT_RGB24, CAM_MEDIA_FORMAT_H264 },
	{ CAM_MEDIA_FORMAT_RGB32, CAM_MEDIA_FORMAT_H264 },
};
static const size_t nSupportedFormats = ARRAYSIZE(supportedFormats);
/* Serialize a MEDIA_TYPE_DESCRIPTION into the stream.
 * Field order defines the wire layout - do not reorder.
 * Caller must ensure sufficient stream capacity. */
static void ecam_dev_write_media_type(wStream* s, CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
	WINPR_ASSERT(mediaType);

	Stream_Write_UINT8(s, WINPR_ASSERTING_INT_CAST(uint8_t, mediaType->Format));
	Stream_Write_UINT32(s, mediaType->Width);
	Stream_Write_UINT32(s, mediaType->Height);
	Stream_Write_UINT32(s, mediaType->FrameRateNumerator);
	Stream_Write_UINT32(s, mediaType->FrameRateDenominator);
	Stream_Write_UINT32(s, mediaType->PixelAspectRatioNumerator);
	Stream_Write_UINT32(s, mediaType->PixelAspectRatioDenominator);
	Stream_Write_UINT8(s, WINPR_ASSERTING_INT_CAST(uint8_t, mediaType->Flags));
}
/* Parse a MEDIA_TYPE_DESCRIPTION from the stream (26 bytes); the caller is
 * responsible for the length check (see ecam_dev_process_start_streams_request).
 *
 * @return TRUE on success, FALSE when format or flags are out of range
 */
static BOOL ecam_dev_read_media_type(wStream* s, CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
	WINPR_ASSERT(mediaType);

	/* validate the format byte before storing it into the enum field */
	const uint8_t format = Stream_Get_UINT8(s);
	if (!rdpecam_valid_CamMediaFormat(format))
		return FALSE;
	mediaType->Format = WINPR_ASSERTING_INT_CAST(CAM_MEDIA_FORMAT, format);
	Stream_Read_UINT32(s, mediaType->Width);
	Stream_Read_UINT32(s, mediaType->Height);
	Stream_Read_UINT32(s, mediaType->FrameRateNumerator);
	Stream_Read_UINT32(s, mediaType->FrameRateDenominator);
	Stream_Read_UINT32(s, mediaType->PixelAspectRatioNumerator);
	Stream_Read_UINT32(s, mediaType->PixelAspectRatioDenominator);
	/* validate the flags byte as well */
	const uint8_t flags = Stream_Get_UINT8(s);
	if (!rdpecam_valid_MediaTypeDescriptionFlags(flags))
		return FALSE;
	mediaType->Flags = WINPR_ASSERTING_INT_CAST(CAM_MEDIA_TYPE_DESCRIPTION_FLAGS, flags);
	return TRUE;
}
/* Debug helper: logs the media type. Note "fps" prints only the frame rate
 * numerator; the denominator is not included in the log line. */
static void ecam_dev_print_media_type(CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
	WINPR_ASSERT(mediaType);

	WLog_DBG(TAG, "Format: %u, width: %u, height: %u, fps: %u, flags: %u", mediaType->Format,
	         mediaType->Width, mediaType->Height, mediaType->FrameRateNumerator, mediaType->Flags);
}
/**
 * Send a CAM_MSG_ID_SampleResponse carrying one captured/encoded sample.
 * Reuses the stream's preallocated response buffer; the buffer is NOT freed
 * by the channel write.
 *
 * @param dev         camera device (streamIndex must be valid, checked by callers)
 * @param streamIndex stream the sample belongs to
 * @param sample      encoded sample bytes
 * @param size        sample size in bytes
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_send_sample_response(CameraDevice* dev, size_t streamIndex, const BYTE* sample,
                                          size_t size)
{
	WINPR_ASSERT(dev);

	CameraDeviceStream* stream = &dev->streams[streamIndex];
	CAM_MSG_ID msg = CAM_MSG_ID_SampleResponse;

	Stream_ResetPosition(stream->sampleRespBuffer);

	/* BUGFIX: the reused buffer is preallocated with a fixed size
	 * (ECAM_SAMPLE_RESPONSE_BUFFER_SIZE); grow it instead of overflowing if an
	 * oversized sample ever arrives. Header + streamIndex byte + payload. */
	if (!Stream_EnsureRemainingCapacity(stream->sampleRespBuffer, CAM_HEADER_SIZE + 1 + size))
	{
		WLog_ERR(TAG, "Stream_EnsureRemainingCapacity failed!");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	Stream_Write_UINT8(stream->sampleRespBuffer,
	                   WINPR_ASSERTING_INT_CAST(uint8_t, dev->ecam->version));
	Stream_Write_UINT8(stream->sampleRespBuffer, WINPR_ASSERTING_INT_CAST(uint8_t, msg));
	Stream_Write_UINT8(stream->sampleRespBuffer, WINPR_ASSERTING_INT_CAST(uint8_t, streamIndex));
	Stream_Write(stream->sampleRespBuffer, sample, size);

	/* channel write is protected by critical section in dvcman_write_channel */
	return ecam_channel_write(dev->ecam, stream->hSampleReqChannel, msg, stream->sampleRespBuffer,
	                          FALSE /* don't free stream */);
}
/* Whether frames of this input format may be silently dropped; only H264
 * input forbids dropping (every frame must be delivered). */
static BOOL mediaSupportDrops(CAM_MEDIA_FORMAT format)
{
	return (format != CAM_MEDIA_FORMAT_H264) ? TRUE : FALSE;
}
/* Try to flush the stream's pending sample to the server.
 * Requires a sample credit (samplesRequested > 0) and a buffered sample;
 * otherwise the send is deferred without error.
 *
 * NOTE(review): callers appear to hold stream->lock around this call (see
 * ecam_dev_sample_captured_callback) - confirm for any other call sites.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_send_pending(CameraDevice* dev, size_t streamIndex, CameraDeviceStream* stream)
{
	WINPR_ASSERT(dev);
	WINPR_ASSERT(stream);

	if (stream->samplesRequested <= 0)
	{
		WLog_VRB(TAG, "Frame delayed: No sample requested");
		return CHANNEL_RC_OK;
	}

	if (!stream->haveSample)
	{
		WLog_VRB(TAG, "Frame response delayed: No sample available");
		return CHANNEL_RC_OK;
	}

	BYTE* encodedSample = Stream_Buffer(stream->pendingSample);
	size_t encodedSize = Stream_Length(stream->pendingSample);
	/* transcode only when input and output formats differ (else passthrough);
	 * on success the pointers are redirected to the encoder's output buffer */
	if (streamInputFormat(stream) != streamOutputFormat(stream))
	{
		if (!ecam_encoder_compress(stream, encodedSample, encodedSize, &encodedSample,
		                           &encodedSize))
		{
			WLog_DBG(TAG, "Frame dropped: error in ecam_encoder_compress");
			stream->haveSample = FALSE;
			return CHANNEL_RC_OK;
		}

		/* the stream may have been stopped while encoding */
		if (!stream->streaming)
		{
			WLog_DBG(TAG, "Frame delayed/dropped: stream stopped");
			return CHANNEL_RC_OK;
		}
	}

	/* consume one credit and mark the pending sample as flushed */
	stream->samplesRequested--;
	stream->haveSample = FALSE;
	return ecam_dev_send_sample_response(dev, streamIndex, encodedSample, encodedSize);
}
/* HAL sample callback (may run on a capture thread, see ICamHalSampleCapturedCallback):
 * buffers the captured sample into stream->pendingSample and tries to send it.
 * For formats that must not drop frames (H264 input) it waits - releasing the
 * lock while sleeping - until the previous sample was flushed.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_sample_captured_callback(CameraDevice* dev, size_t streamIndex,
                                              const BYTE* sample, size_t size)
{
	WINPR_ASSERT(dev);

	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
		return ERROR_INVALID_INDEX;

	CameraDeviceStream* stream = &dev->streams[streamIndex];

	if (!stream->streaming)
	{
		WLog_DBG(TAG, "Frame drop: stream not running");
		return CHANNEL_RC_OK;
	}

	EnterCriticalSection(&stream->lock);
	UINT ret = CHANNEL_RC_NO_MEMORY;

	/* If we already have a waiting sample, let's see if the input format support dropping
	 * frames so that we could just "refresh" the pending sample, otherwise we must wait until
	 * a frame request flushes it
	 */
	if (stream->haveSample && !mediaSupportDrops(stream->formats.inputFormat))
	{
		/* we can't drop samples, so we have to wait until the pending sample is
		 * sent, by a sample request.
		 *
		 * When we're here we already have a sample ready to be sent, the delay between 2 frames
		 * seems like a reasonable wait delay. For instance 60 FPS means a frame every 16ms.
		 * We also cap that wait delay to not spinloop and not get stuck for too long.
		 * */
		DWORD waitDelay = (1000 * stream->currMediaType.FrameRateDenominator) /
		                  stream->currMediaType.FrameRateNumerator;
		if (waitDelay < 16)
			waitDelay = 16;
		if (waitDelay > 100)
			waitDelay = 100;

		/* lock is dropped while sleeping so the sender can flush haveSample */
		while (stream->haveSample && stream->streaming)
		{
			LeaveCriticalSection(&stream->lock);
			SleepEx(waitDelay, TRUE);
			EnterCriticalSection(&stream->lock);
		}

		if (!stream->streaming)
		{
			WLog_DBG(TAG, "Frame drop: stream not running");
			ret = CHANNEL_RC_OK;
			goto out;
		}
	}

	/* copy the sample into the reusable pending buffer (grown on demand) */
	Stream_ResetPosition(stream->pendingSample);
	if (!Stream_EnsureRemainingCapacity(stream->pendingSample, size))
		goto out;

	Stream_Write(stream->pendingSample, sample, size);
	Stream_SealLength(stream->pendingSample);
	stream->haveSample = TRUE;

	ret = ecam_dev_send_pending(dev, streamIndex, stream);

out:
	LeaveCriticalSection(&stream->lock);
	return ret;
}
/**
 * Stops streaming on the given stream index and frees its per-stream
 * resources (sample response buffer, pending sample, encoder context).
 * Safe to call repeatedly: freed pointers are reset to nullptr.
 */
static void ecam_dev_stop_stream(CameraDevice* dev, size_t streamIndex)
{
	WINPR_ASSERT(dev);
	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
		return;
	CameraDeviceStream* stream = &dev->streams[streamIndex];
	if (stream->streaming)
	{
		stream->streaming = FALSE;
		/* NOTE(review): the HAL is told to stop stream 0 regardless of
		 * streamIndex — confirm this is intentional (single-stream HAL)
		 * or should pass streamIndex */
		dev->ihal->StopStream(dev->ihal, dev->deviceId, 0);
		/* NOTE(review): the lock is only deleted when streaming was TRUE;
		 * a start request that initialized the lock but failed before
		 * setting streaming leaves it undeleted — confirm cleanup path */
		DeleteCriticalSection(&stream->lock);
	}
	Stream_Free(stream->sampleRespBuffer, TRUE);
	stream->sampleRespBuffer = nullptr;
	Stream_Free(stream->pendingSample, TRUE);
	stream->pendingSample = nullptr;
	ecam_encoder_context_free(stream);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_StopStreamsRequest: tears down every stream of the
 * device and acknowledges with a success message. The request carries no
 * payload, so the stream argument is ignored.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_stop_streams_request(CameraDevice* dev,
                                                  GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
	WINPR_ASSERT(dev);
	WINPR_UNUSED(s);

	size_t index = 0;
	while (index < ECAM_DEVICE_MAX_STREAMS)
	{
		ecam_dev_stop_stream(dev, index);
		index++;
	}

	return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_StartStreamsRequest: reads the stream index and the
 * requested media type, initializes the encoder (when input and output
 * formats differ), allocates per-stream buffers and asks the HAL to start
 * capturing. On any failure the partially initialized stream is torn down
 * via ecam_dev_stop_stream and an error response is sent to the server.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_start_streams_request(CameraDevice* dev,
                                                   GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
	BYTE streamIndex = 0;
	CAM_MEDIA_TYPE_DESCRIPTION mediaType = WINPR_C_ARRAY_INIT;
	WINPR_ASSERT(dev);
	/* 1 byte stream index + 26 bytes MEDIA_TYPE_DESCRIPTION */
	if (!Stream_CheckAndLogRequiredLength(TAG, s, 1 + 26))
		return ERROR_INVALID_DATA;
	Stream_Read_UINT8(s, streamIndex);
	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
	{
		WLog_ERR(TAG, "Incorrect streamIndex %" PRIu8, streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
		return ERROR_INVALID_INDEX;
	}
	if (!ecam_dev_read_media_type(s, &mediaType))
	{
		WLog_ERR(TAG, "Unable to read MEDIA_TYPE_DESCRIPTION");
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidMessage);
		return ERROR_INVALID_DATA;
	}
	ecam_dev_print_media_type(&mediaType);
	CameraDeviceStream* stream = &dev->streams[streamIndex];
	if (stream->streaming)
	{
		WLog_ERR(TAG, "Streaming already in progress, device %s, streamIndex %d", dev->deviceId,
		         streamIndex);
		return CAM_ERROR_CODE_UnexpectedError;
	}
	/* saving media type description for CurrentMediaTypeRequest,
	 * to be done before calling ecam_encoder_context_init
	 */
	stream->currMediaType = mediaType;
	/* initialize encoder, if input and output formats differ */
	if (streamInputFormat(stream) != streamOutputFormat(stream) &&
	    !ecam_encoder_context_init(stream))
	{
		WLog_ERR(TAG, "stream_ecam_encoder_init failed");
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_UnexpectedError);
		return ERROR_INVALID_DATA;
	}
	stream->sampleRespBuffer = Stream_New(nullptr, ECAM_SAMPLE_RESPONSE_BUFFER_SIZE);
	if (!stream->sampleRespBuffer)
	{
		WLog_ERR(TAG, "Stream_New failed");
		ecam_dev_stop_stream(dev, streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
		return ERROR_INVALID_DATA;
	}
	/* replacing outputFormat with inputFormat in mediaType before starting stream */
	mediaType.Format = streamInputFormat(stream);
	stream->samplesRequested = 0;
	stream->haveSample = FALSE;
	if (!InitializeCriticalSectionEx(&stream->lock, 0, 0))
	{
		WLog_ERR(TAG, "InitializeCriticalSectionEx failed");
		ecam_dev_stop_stream(dev, streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
		return ERROR_INVALID_DATA;
	}
	/* worst case: 4 bytes per pixel (e.g. RGB32) */
	stream->pendingSample = Stream_New(nullptr, 4ull * mediaType.Width * mediaType.Height);
	if (!stream->pendingSample)
	{
		WLog_ERR(TAG, "pending stream failed");
		/* NOTE(review): stream->streaming is still FALSE here, so
		 * ecam_dev_stop_stream will not delete the lock initialized above —
		 * confirm whether the critical section leaks on this path */
		ecam_dev_stop_stream(dev, streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
		return ERROR_INVALID_DATA;
	}
	const CAM_ERROR_CODE error = dev->ihal->StartStream(dev->ihal, dev, streamIndex, &mediaType,
	                                                    ecam_dev_sample_captured_callback);
	if (error)
	{
		WLog_ERR(TAG, "StartStream failure");
		ecam_dev_stop_stream(dev, streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, error);
		return ERROR_INVALID_DATA;
	}
	stream->streaming = TRUE;
	return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_PropertyListRequest. Property enumeration is not
 * implemented yet, so an empty PropertyListResponse is returned.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_property_list_request(CameraDevice* dev,
                                                   GENERIC_CHANNEL_CALLBACK* hchannel,
                                                   WINPR_ATTR_UNUSED wStream* s)
{
	WINPR_ASSERT(dev);

	/* TODO: supported properties implementation */
	return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_PropertyListResponse);
}
/**
 * Function description
 *
 * Serializes the given media type description into a
 * CAM_MSG_ID_CurrentMediaTypeResponse message and sends it on the channel.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_send_current_media_type_response(CameraDevice* dev,
                                                      GENERIC_CHANNEL_CALLBACK* hchannel,
                                                      CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
	WINPR_ASSERT(dev);

	const CAM_MSG_ID msg = CAM_MSG_ID_CurrentMediaTypeResponse;
	wStream* rsp = Stream_New(nullptr, CAM_HEADER_SIZE + sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
	if (!rsp)
	{
		WLog_ERR(TAG, "Stream_New failed");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	/* common header: protocol version + message id */
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, dev->ecam->version));
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, msg));
	ecam_dev_write_media_type(rsp, mediaType);

	return ecam_channel_write(dev->ecam, hchannel, msg, rsp, TRUE);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_SampleRequest: remembers which channel requested the
 * sample (responses are sent asynchronously from the capture thread),
 * increments the outstanding-sample counter and flushes an already-buffered
 * sample if one is pending.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_sample_request(CameraDevice* dev, GENERIC_CHANNEL_CALLBACK* hchannel,
                                            wStream* s)
{
	BYTE streamIndex = 0;
	WINPR_ASSERT(dev);
	if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
		return ERROR_INVALID_DATA;
	Stream_Read_UINT8(s, streamIndex);
	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
	{
		WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
		return ERROR_INVALID_INDEX;
	}
	CameraDeviceStream* stream = &dev->streams[streamIndex];
	/* stream->lock is only initialized by a successful StartStreamsRequest;
	 * entering it for a stream that never started is undefined behavior, so
	 * reject sample requests for non-streaming streams up front.
	 * NOTE(review): a stop racing with this check is still possible; full
	 * protection would need a lock with device lifetime.
	 */
	if (!stream->streaming)
	{
		WLog_WARN(TAG, "SampleRequest for streamIndex %" PRIu8 " which is not streaming",
		          streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_NotInitialized);
		return ERROR_INVALID_STATE;
	}
	EnterCriticalSection(&stream->lock);
	/* need to save channel because responses are asynchronous and coming from capture thread */
	if (stream->hSampleReqChannel != hchannel)
		stream->hSampleReqChannel = hchannel;
	stream->samplesRequested++;
	const UINT ret = ecam_dev_send_pending(dev, streamIndex, stream);
	LeaveCriticalSection(&stream->lock);
	return ret;
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_CurrentMediaTypeRequest: returns the media type saved
 * for the stream, or a NotInitialized error if none was negotiated yet.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_current_media_type_request(CameraDevice* dev,
                                                        GENERIC_CHANNEL_CALLBACK* hchannel,
                                                        wStream* s)
{
	WINPR_ASSERT(dev);

	if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
		return ERROR_INVALID_DATA;

	BYTE streamIndex = 0;
	Stream_Read_UINT8(s, streamIndex);

	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
	{
		WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
		return ERROR_INVALID_INDEX;
	}

	CameraDeviceStream* stream = &dev->streams[streamIndex];

	/* Format 0 means no media type was saved for this stream yet */
	if (stream->currMediaType.Format != 0)
		return ecam_dev_send_current_media_type_response(dev, hchannel, &stream->currMediaType);

	WLog_ERR(TAG, "Current media type unknown for streamIndex %d", streamIndex);
	ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_NotInitialized);
	return ERROR_DEVICE_REINITIALIZATION_NEEDED;
}
/**
 * Function description
 *
 * Serializes an array of media type descriptions into a
 * CAM_MSG_ID_MediaTypeListResponse message and sends it on the channel.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_send_media_type_list_response(CameraDevice* dev,
                                                   GENERIC_CHANNEL_CALLBACK* hchannel,
                                                   CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes,
                                                   size_t nMediaTypes)
{
	WINPR_ASSERT(dev);

	const CAM_MSG_ID msg = CAM_MSG_ID_MediaTypeListResponse;
	wStream* rsp = Stream_New(nullptr, CAM_HEADER_SIZE + ECAM_MAX_MEDIA_TYPE_DESCRIPTORS *
	                                                         sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
	if (!rsp)
	{
		WLog_ERR(TAG, "Stream_New failed");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	/* common header: protocol version + message id */
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, dev->ecam->version));
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, msg));

	for (size_t i = 0; i < nMediaTypes; i++)
		ecam_dev_write_media_type(rsp, &mediaTypes[i]);

	return ecam_channel_write(dev->ecam, hchannel, msg, rsp, TRUE);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_MediaTypeListRequest: queries the HAL for media type
 * descriptions compatible with this client's supported formats, rewrites
 * them to advertise the output (encoded) format, and sends the list.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_media_type_list_request(CameraDevice* dev,
                                                     GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
	UINT error = CHANNEL_RC_OK;
	BYTE streamIndex = 0;
	CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes = nullptr;
	size_t nMediaTypes = ECAM_MAX_MEDIA_TYPE_DESCRIPTORS;
	WINPR_ASSERT(dev);
	if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
		return ERROR_INVALID_DATA;
	Stream_Read_UINT8(s, streamIndex);
	if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
	{
		WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
		return ERROR_INVALID_INDEX;
	}
	CameraDeviceStream* stream = &dev->streams[streamIndex];
	mediaTypes =
	    (CAM_MEDIA_TYPE_DESCRIPTION*)calloc(nMediaTypes, sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
	if (!mediaTypes)
	{
		WLog_ERR(TAG, "calloc failed");
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
		return CHANNEL_RC_NO_MEMORY;
	}
	INT16 formatIndex =
	    dev->ihal->GetMediaTypeDescriptions(dev->ihal, dev->deviceId, streamIndex, supportedFormats,
	                                        nSupportedFormats, mediaTypes, &nMediaTypes);
	/* formatIndex is used to index supportedFormats below, so validate the
	 * full range, not just the documented -1 "not found" value */
	if (formatIndex < 0 || (size_t)formatIndex >= nSupportedFormats || nMediaTypes == 0)
	{
		WLog_ERR(TAG, "Camera doesn't support any compatible video formats");
		ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_ItemNotFound);
		error = ERROR_DEVICE_FEATURE_NOT_SUPPORTED;
		goto error;
	}
	stream->formats = supportedFormats[formatIndex];
	/* replacing inputFormat with outputFormat in mediaTypes before sending response */
	for (size_t i = 0; i < nMediaTypes; i++)
	{
		mediaTypes[i].Format = streamOutputFormat(stream);
		mediaTypes[i].Flags = CAM_MEDIA_TYPE_DESCRIPTION_FLAG_DecodingRequired;
	}
	if (stream->currMediaType.Format == 0)
	{
		/* saving 1st media type description for CurrentMediaTypeRequest */
		stream->currMediaType = mediaTypes[0];
	}
	error = ecam_dev_send_media_type_list_response(dev, hchannel, mediaTypes, nMediaTypes);
error:
	free(mediaTypes);
	return error;
}
/**
 * Function description
 *
 * Sends a CAM_MSG_ID_StreamListResponse describing the single color
 * capture stream this client exposes.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_send_stream_list_response(CameraDevice* dev,
                                               GENERIC_CHANNEL_CALLBACK* hchannel)
{
	WINPR_ASSERT(dev);

	const CAM_MSG_ID msg = CAM_MSG_ID_StreamListResponse;
	wStream* rsp = Stream_New(nullptr, CAM_HEADER_SIZE + sizeof(CAM_STREAM_DESCRIPTION));
	if (!rsp)
	{
		WLog_ERR(TAG, "Stream_New failed");
		return ERROR_NOT_ENOUGH_MEMORY;
	}

	/* common header: protocol version + message id */
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, dev->ecam->version));
	Stream_Write_UINT8(rsp, WINPR_ASSERTING_INT_CAST(uint8_t, msg));

	/* single stream description */
	Stream_Write_UINT16(rsp, CAM_STREAM_FRAME_SOURCE_TYPE_Color);
	Stream_Write_UINT8(rsp, CAM_STREAM_CATEGORY_Capture);
	Stream_Write_UINT8(rsp, TRUE /* Selected */);
	Stream_Write_UINT8(rsp, FALSE /* CanBeShared */);

	return ecam_channel_write(dev->ecam, hchannel, msg, rsp, TRUE);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_StreamListRequest. The request payload is ignored;
 * a single-stream description is returned.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_stream_list_request(CameraDevice* dev,
                                                 GENERIC_CHANNEL_CALLBACK* hchannel,
                                                 WINPR_ATTR_UNUSED wStream* s)
{
	return ecam_dev_send_stream_list_response(dev, hchannel);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_ActivateDeviceRequest: forwards the activation to the
 * HAL and reports the outcome to the server.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_activate_device_request(CameraDevice* dev,
                                                     GENERIC_CHANNEL_CALLBACK* hchannel,
                                                     WINPR_ATTR_UNUSED wStream* s)
{
	WINPR_ASSERT(dev);

	CAM_ERROR_CODE halError = CAM_ERROR_CODE_None;
	if (!dev->ihal->Activate(dev->ihal, dev->deviceId, &halError))
		return ecam_channel_send_error_response(dev->ecam, hchannel, halError);

	return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
 * Function description
 *
 * Handles CAM_MSG_ID_DeactivateDeviceRequest: stops all streams first, then
 * forwards the deactivation to the HAL and reports the outcome.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_process_deactivate_device_request(CameraDevice* dev,
                                                       GENERIC_CHANNEL_CALLBACK* hchannel,
                                                       wStream* s)
{
	WINPR_ASSERT(dev);
	WINPR_UNUSED(s);

	size_t index = 0;
	while (index < ECAM_DEVICE_MAX_STREAMS)
	{
		ecam_dev_stop_stream(dev, index);
		index++;
	}

	CAM_ERROR_CODE halError = CAM_ERROR_CODE_None;
	if (!dev->ihal->Deactivate(dev->ihal, dev->deviceId, &halError))
		return ecam_channel_send_error_response(dev->ecam, hchannel, halError);

	return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
 * Function description
 *
 * Per-device channel data handler: parses the 2-byte MS-RDPECAM header
 * (version + message id) and dispatches to the matching request handler.
 * Unknown message ids are answered with OperationNotSupported.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_on_data_received(IWTSVirtualChannelCallback* pChannelCallback, wStream* data)
{
	UINT error = CHANNEL_RC_OK;
	BYTE version = 0;
	BYTE messageId = 0;
	GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
	if (!hchannel || !data)
		return ERROR_INVALID_PARAMETER;
	CameraDevice* dev = (CameraDevice*)hchannel->plugin;
	if (!dev)
		return ERROR_INTERNAL_ERROR;
	/* NOTE(review): this checks stream *capacity*, not remaining *length*,
	 * before the two reads below — confirm whether
	 * Stream_CheckAndLogRequiredLength was intended here */
	if (!Stream_CheckAndLogRequiredCapacity(TAG, data, CAM_HEADER_SIZE))
		return ERROR_NO_DATA;
	Stream_Read_UINT8(data, version);
	Stream_Read_UINT8(data, messageId);
	WLog_DBG(TAG, "ChannelId=%" PRIu32 ", MessageId=0x%02" PRIx8 ", Version=%d",
	         hchannel->channel_mgr->GetChannelId(hchannel->channel), messageId, version);
	switch (messageId)
	{
		case CAM_MSG_ID_ActivateDeviceRequest:
			error = ecam_dev_process_activate_device_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_DeactivateDeviceRequest:
			error = ecam_dev_process_deactivate_device_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_StreamListRequest:
			error = ecam_dev_process_stream_list_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_MediaTypeListRequest:
			error = ecam_dev_process_media_type_list_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_CurrentMediaTypeRequest:
			error = ecam_dev_process_current_media_type_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_PropertyListRequest:
			error = ecam_dev_process_property_list_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_StartStreamsRequest:
			error = ecam_dev_process_start_streams_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_StopStreamsRequest:
			error = ecam_dev_process_stop_streams_request(dev, hchannel, data);
			break;
		case CAM_MSG_ID_SampleRequest:
			error = ecam_dev_process_sample_request(dev, hchannel, data);
			break;
		default:
			WLog_WARN(TAG, "unknown MessageId=0x%02" PRIx8 "", messageId);
			error = ERROR_INVALID_DATA;
			ecam_channel_send_error_response(dev->ecam, hchannel,
			                                 CAM_ERROR_CODE_OperationNotSupported);
			break;
	}
	return error;
}
/**
 * Function description
 *
 * Channel-open callback; nothing to initialize per channel, only logs.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_on_open(WINPR_ATTR_UNUSED IWTSVirtualChannelCallback* pChannelCallback)
{
	WLog_DBG(TAG, "entered");
	return CHANNEL_RC_OK;
}
/**
 * Function description
 *
 * Channel-close callback: stops every stream, detaches this channel from
 * any stream that used it for sample responses, and frees the callback.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_on_close(IWTSVirtualChannelCallback* pChannelCallback)
{
	GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
	WINPR_ASSERT(hchannel);

	CameraDevice* dev = (CameraDevice*)hchannel->plugin;
	WINPR_ASSERT(dev);

	WLog_DBG(TAG, "entered");

	for (size_t i = 0; i < ECAM_DEVICE_MAX_STREAMS; i++)
	{
		CameraDeviceStream* stream = &dev->streams[i];
		ecam_dev_stop_stream(dev, i);
		/* make sure this channel is not used for sample responses */
		if (stream->hSampleReqChannel == hchannel)
			stream->hSampleReqChannel = nullptr;
	}

	free(hchannel);
	return CHANNEL_RC_OK;
}
/**
 * Function description
 *
 * Listener callback: allocates a per-channel callback structure, wires up
 * the data/open/close handlers and hands it back to the channel manager.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static UINT ecam_dev_on_new_channel_connection(IWTSListenerCallback* pListenerCallback,
                                               IWTSVirtualChannel* pChannel,
                                               WINPR_ATTR_UNUSED BYTE* Data,
                                               WINPR_ATTR_UNUSED BOOL* pbAccept,
                                               IWTSVirtualChannelCallback** ppCallback)
{
	GENERIC_LISTENER_CALLBACK* hlistener = (GENERIC_LISTENER_CALLBACK*)pListenerCallback;
	if (!hlistener || !hlistener->plugin)
		return ERROR_INTERNAL_ERROR;

	WLog_DBG(TAG, "entered");

	GENERIC_CHANNEL_CALLBACK* cb =
	    (GENERIC_CHANNEL_CALLBACK*)calloc(1, sizeof(GENERIC_CHANNEL_CALLBACK));
	if (!cb)
	{
		WLog_ERR(TAG, "calloc failed");
		return CHANNEL_RC_NO_MEMORY;
	}

	/* wire up the per-channel callback table */
	cb->iface.OnDataReceived = ecam_dev_on_data_received;
	cb->iface.OnOpen = ecam_dev_on_open;
	cb->iface.OnClose = ecam_dev_on_close;
	cb->plugin = hlistener->plugin;
	cb->channel_mgr = hlistener->channel_mgr;
	cb->channel = pChannel;

	*ppCallback = (IWTSVirtualChannelCallback*)cb;
	return CHANNEL_RC_OK;
}
/**
 * Function description
 *
 * Allocates a CameraDevice, registers a dynamic channel listener named
 * after the device id, and returns the device.
 *
 * @return CameraDevice pointer or nullptr in case of error
 */
CameraDevice* ecam_dev_create(CameraPlugin* ecam, const char* deviceId,
                              WINPR_ATTR_UNUSED const char* deviceName)
{
	WINPR_ASSERT(ecam);
	WINPR_ASSERT(ecam->hlistener);

	IWTSVirtualChannelManager* mgr = ecam->hlistener->channel_mgr;
	WINPR_ASSERT(mgr);

	WLog_DBG(TAG, "entered for %s", deviceId);

	CameraDevice* dev = (CameraDevice*)calloc(1, sizeof(CameraDevice));
	if (!dev)
	{
		WLog_ERR(TAG, "calloc failed");
		return nullptr;
	}

	dev->ecam = ecam;
	dev->ihal = ecam->ihal;
	/* dev is zero-initialized, so the copy stays NUL-terminated */
	strncpy(dev->deviceId, deviceId, sizeof(dev->deviceId) - 1);

	dev->hlistener = (GENERIC_LISTENER_CALLBACK*)calloc(1, sizeof(GENERIC_LISTENER_CALLBACK));
	if (!dev->hlistener)
	{
		WLog_ERR(TAG, "calloc failed");
		goto fail;
	}

	dev->hlistener->iface.OnNewChannelConnection = ecam_dev_on_new_channel_connection;
	dev->hlistener->plugin = (IWTSPlugin*)dev;
	dev->hlistener->channel_mgr = mgr;

	if (mgr->CreateListener(mgr, deviceId, 0, &dev->hlistener->iface, &dev->listener) !=
	    CHANNEL_RC_OK)
	{
		WLog_ERR(TAG, "CreateListener failed");
		goto fail;
	}

	return dev;

fail:
	free(dev->hlistener);
	free(dev);
	return nullptr;
}
/**
 * Function description
 *
 * OBJECT_FREE_FN for devices hash table value: destroys the listener,
 * stops all streams and releases the device. nullptr-safe.
 */
void ecam_dev_destroy(CameraDevice* dev)
{
	if (!dev)
		return;

	WLog_DBG(TAG, "entered for %s", dev->deviceId);

	if (dev->hlistener)
	{
		IWTSVirtualChannelManager* mgr = dev->hlistener->channel_mgr;
		if (mgr)
			IFCALL(mgr->DestroyListener, mgr, dev->listener);
	}
	free(dev->hlistener);

	size_t index = 0;
	while (index < ECAM_DEVICE_MAX_STREAMS)
	{
		ecam_dev_stop_stream(dev, index);
		index++;
	}

	free(dev);
}

View File

@@ -0,0 +1,642 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Video Encoding
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include <winpr/winpr.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-video.client")
#if defined(WITH_INPUT_FORMAT_H264)
/*
 * demux a H264 frame from a MJPG container
 * args:
 *   srcData - pointer to buffer with h264 muxed in MJPG container
 *   srcSize - buff size
 *   h264_data - pointer to h264 data
 *   h264_max_size - maximum size allowed by h264_data buffer
 *
 * Credits:
 *   guvcview http://guvcview.sourceforge.net
 *   Paulo Assis <pj.assis@gmail.com>
 *
 * see "Figure 5 Payload Size" in USB_Video_Payload_H264_1.0.pdf
 * for format details
 *
 * @return: data size and copies demuxed data to h264 buffer (0 on error)
 */
static size_t demux_uvcH264(const BYTE* srcData, size_t srcSize, BYTE* h264_data,
                            size_t h264_max_size)
{
	WINPR_ASSERT(h264_data);
	WINPR_ASSERT(srcData);
	if (srcSize < 30)
	{
		WLog_ERR(TAG, "Expected srcSize >= 30, got %" PRIuz, srcSize);
		return 0;
	}
	const uint8_t* spl = nullptr;
	/* write cursor into the output h264 buffer */
	uint8_t* ph264 = h264_data;
	/* search for 1st APP4 marker
	 * (30 = 2 APP4 marker + 2 length + 22 header + 4 payload size)
	 */
	for (const uint8_t* sp = srcData; sp < srcData + srcSize - 30; sp++)
	{
		if (sp[0] == 0xFF && sp[1] == 0xE4)
		{
			spl = sp + 2; /* exclude APP4 marker */
			break;
		}
	}
	if (spl == nullptr)
	{
		WLog_ERR(TAG, "Expected 1st APP4 marker but none found");
		return 0;
	}
	if (spl > srcData + srcSize - 4)
	{
		WLog_ERR(TAG, "Payload + Header size bigger than srcData buffer");
		return 0;
	}
	/* 1st segment length in big endian
	 * includes payload size + header + 6 bytes (2 length + 4 payload size)
	 */
	uint16_t length = (uint16_t)(spl[0] << 8) & UINT16_MAX;
	length |= (uint16_t)spl[1];
	spl += 2; /* header */
	/* header length in little endian at offset 2 */
	uint16_t header_length = (uint16_t)spl[2];
	header_length |= (uint16_t)spl[3] << 8;
	spl += header_length;
	if (spl > srcData + srcSize)
	{
		WLog_ERR(TAG, "Header size bigger than srcData buffer");
		return 0;
	}
	/* payload size in little endian */
	uint32_t payload_size = (uint32_t)spl[0] << 0;
	payload_size |= (uint32_t)spl[1] << 8;
	payload_size |= (uint32_t)spl[2] << 16;
	payload_size |= (uint32_t)spl[3] << 24;
	if (payload_size > h264_max_size)
	{
		WLog_ERR(TAG, "Payload size bigger than h264_data buffer");
		return 0;
	}
	spl += 4; /* payload start */
	const uint8_t* epl = spl + payload_size; /* payload end */
	if (epl > srcData + srcSize)
	{
		WLog_ERR(TAG, "Payload size bigger than srcData buffer");
		return 0;
	}
	/* NOTE(review): unsigned arithmetic — if length < header_length + 6 this
	 * underflows and the memcpy below copies a huge span; likewise `length`
	 * is not validated against epl. Confirm bounds against the UVC H264
	 * payload spec before processing untrusted input. */
	length -= header_length + 6;
	/* copy 1st segment to h264 buffer */
	memcpy(ph264, spl, length);
	ph264 += length;
	spl += length;
	/* copy other segments: each starts with an APP4 marker + big-endian length */
	while (epl > spl + 4)
	{
		if (spl[0] != 0xFF || spl[1] != 0xE4)
		{
			WLog_ERR(TAG, "Expected 2nd+ APP4 marker but none found");
			const intptr_t diff = ph264 - h264_data;
			return WINPR_ASSERTING_INT_CAST(size_t, diff);
		}
		/* 2nd+ segment length in big endian */
		length = (uint16_t)(spl[2] << 8) & UINT16_MAX;
		length |= (uint16_t)spl[3];
		if (length < 2)
		{
			WLog_ERR(TAG, "Expected 2nd+ APP4 length >= 2 but have %" PRIu16, length);
			return 0;
		}
		length -= 2;
		spl += 4; /* APP4 marker + length */
		/* copy segment to h264 buffer */
		memcpy(ph264, spl, length);
		ph264 += length;
		spl += length;
	}
	/* total number of bytes written to h264_data */
	const intptr_t diff = ph264 - h264_data;
	return WINPR_ASSERTING_INT_CAST(size_t, diff);
}
#endif
/**
 * Function description
 *
 * Looks up a recommended maximum H264 bitrate for the given frame height.
 *
 * @return bitrate in bps
 */
UINT32 h264_get_max_bitrate(UINT32 height)
{
	/* source: https://livekit.io/webrtc/bitrate-guide (webcam streaming),
	 * entries sorted by height in descending order; the { 0, ... } entry
	 * is a catch-all for any height */
	static struct Bitrates
	{
		UINT32 height;
		UINT32 bitrate; /* kbps */
	} bitrates[] = {
		{ 1080, 2700 }, { 720, 1250 }, { 480, 700 }, { 360, 400 },
		{ 240, 170 },   { 180, 140 },  { 0, 100 },
	};

	for (size_t i = 0; i < ARRAYSIZE(bitrates); i++)
	{
		if (height < bitrates[i].height)
			continue;

		const UINT32 kbps = bitrates[i].bitrate;
		WLog_DBG(TAG, "Setting h264 max bitrate: %u kbps", kbps);
		return kbps * 1000;
	}

	/* unreachable: the catch-all entry always matches */
	WINPR_ASSERT(FALSE);
	return 0;
}
/**
 * Function description
 *
 * Maps an MS-RDPECAM media format to the matching FFmpeg pixel format.
 *
 * @return enum AVPixelFormat value, AV_PIX_FMT_NONE if unsupported
 */
static enum AVPixelFormat ecamToAVPixFormat(CAM_MEDIA_FORMAT ecamFormat)
{
	if (ecamFormat == CAM_MEDIA_FORMAT_YUY2)
		return AV_PIX_FMT_YUYV422;
	if (ecamFormat == CAM_MEDIA_FORMAT_NV12)
		return AV_PIX_FMT_NV12;
	if (ecamFormat == CAM_MEDIA_FORMAT_I420)
		return AV_PIX_FMT_YUV420P;
	if (ecamFormat == CAM_MEDIA_FORMAT_RGB24)
		return AV_PIX_FMT_RGB24;
	if (ecamFormat == CAM_MEDIA_FORMAT_RGB32)
		return AV_PIX_FMT_RGB32;

	WLog_ERR(TAG, "Unsupported ecamFormat %u", ecamFormat);
	return AV_PIX_FMT_NONE;
}
/* Releases the stream's libswscale context, if any, and clears the pointer. */
static void ecam_sws_free(CameraDeviceStream* stream)
{
	if (!stream->sws)
		return;

	sws_freeContext(stream->sws);
	stream->sws = nullptr;
}
/* Returns TRUE when an existing scaler context matches the stream's current
 * geometry and both dimensions fit into a signed 32-bit int. */
static BOOL ecam_sws_valid(const CameraDeviceStream* stream)
{
	const BOOL geometryMatches = stream->sws && (stream->swsWidth == stream->currMediaType.Width) &&
	                             (stream->swsHeight == stream->currMediaType.Height);
	const BOOL fitsInt = (stream->currMediaType.Width <= INT32_MAX) &&
	                     (stream->currMediaType.Height <= INT32_MAX);
	return geometryMatches && fitsInt;
}
/**
 * Function description
 * initialize libswscale
 *
 * Reuses the existing scaler context when it still matches the stream's
 * geometry, otherwise creates a new one converting from pixFormat to the
 * encoder's expected format (NV12 with hardware acceleration, YUV420P
 * otherwise).
 *
 * @return success/failure
 */
static BOOL ecam_init_sws_context(CameraDeviceStream* stream, enum AVPixelFormat pixFormat)
{
	WINPR_ASSERT(stream);

	if (stream->currMediaType.Width > INT32_MAX)
		return FALSE;
	if (stream->currMediaType.Height > INT32_MAX)
		return FALSE;

	if (ecam_sws_valid(stream))
		return TRUE;

	ecam_sws_free(stream);

	/* the MJPEG decoder may still emit deprecated "J" (full-range) formats;
	 * map them to their modern equivalents */
	enum AVPixelFormat srcFormat = pixFormat;
	switch (srcFormat)
	{
		case AV_PIX_FMT_YUVJ411P:
			srcFormat = AV_PIX_FMT_YUV411P;
			break;
		case AV_PIX_FMT_YUVJ420P:
			srcFormat = AV_PIX_FMT_YUV420P;
			break;
		case AV_PIX_FMT_YUVJ422P:
			srcFormat = AV_PIX_FMT_YUV422P;
			break;
		case AV_PIX_FMT_YUVJ440P:
			srcFormat = AV_PIX_FMT_YUV440P;
			break;
		case AV_PIX_FMT_YUVJ444P:
			srcFormat = AV_PIX_FMT_YUV444P;
			break;
		default:
			break;
	}

	/* remember the geometry this scaler was built for */
	stream->swsWidth = stream->currMediaType.Width;
	stream->swsHeight = stream->currMediaType.Height;

	const int w = WINPR_ASSERTING_INT_CAST(int, stream->currMediaType.Width);
	const int h = WINPR_ASSERTING_INT_CAST(int, stream->currMediaType.Height);

	enum AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;
	if (h264_context_get_option(stream->h264, H264_CONTEXT_OPTION_HW_ACCEL))
		dstFormat = AV_PIX_FMT_NV12;

	stream->sws = sws_getContext(w, h, srcFormat, w, h, dstFormat, 0, nullptr, nullptr, nullptr);
	if (stream->sws)
		return TRUE;

	WLog_ERR(TAG, "sws_getContext failed");
	return FALSE;
}
/**
 * Function description
 *
 * Converts one captured frame to H264. Depending on the input format:
 * - MJPG_H264: demux the embedded H264 stream (no re-encoding)
 * - MJPG: decode with FFmpeg, then scale + encode
 * - raw formats: scale + encode directly
 * On success *ppDstData/*pDstSize point at the encoded frame (buffer owned
 * by the stream/encoder, valid until the next call).
 *
 * @return success/failure
 */
static BOOL ecam_encoder_compress_h264(CameraDeviceStream* stream, const BYTE* srcData,
                                       size_t srcSize, BYTE** ppDstData, size_t* pDstSize)
{
	UINT32 dstSize = 0;
	BYTE* srcSlice[4] = WINPR_C_ARRAY_INIT;
	int srcLineSizes[4] = WINPR_C_ARRAY_INIT;
	BYTE* yuvData[3] = WINPR_C_ARRAY_INIT;
	UINT32 yuvLineSizes[3] = WINPR_C_ARRAY_INIT;
	prim_size_t size = { stream->currMediaType.Width, stream->currMediaType.Height };
	CAM_MEDIA_FORMAT inputFormat = streamInputFormat(stream);
	enum AVPixelFormat pixFormat = AV_PIX_FMT_NONE;
#if defined(WITH_INPUT_FORMAT_H264)
	if (inputFormat == CAM_MEDIA_FORMAT_MJPG_H264)
	{
		/* passthrough: extract the H264 stream from the UVC MJPG container */
		const size_t rc =
		    demux_uvcH264(srcData, srcSize, stream->h264Frame, stream->h264FrameMaxSize);
		dstSize = WINPR_ASSERTING_INT_CAST(uint32_t, rc);
		*ppDstData = stream->h264Frame;
		*pDstSize = dstSize;
		return dstSize > 0;
	}
	else
#endif
#if defined(WITH_INPUT_FORMAT_MJPG)
	    if (inputFormat == CAM_MEDIA_FORMAT_MJPG)
	{
		/* decode the MJPG frame; the packet borrows the caller's buffer */
		stream->avInputPkt->data = WINPR_CAST_CONST_PTR_AWAY(srcData, uint8_t*);
		WINPR_ASSERT(srcSize <= INT32_MAX);
		stream->avInputPkt->size = (int)srcSize;
		if (avcodec_send_packet(stream->avContext, stream->avInputPkt) < 0)
		{
			WLog_ERR(TAG, "avcodec_send_packet failed");
			return FALSE;
		}
		if (avcodec_receive_frame(stream->avContext, stream->avOutFrame) < 0)
		{
			WLog_ERR(TAG, "avcodec_receive_frame failed");
			return FALSE;
		}
		for (size_t i = 0; i < 4; i++)
		{
			srcSlice[i] = stream->avOutFrame->data[i];
			srcLineSizes[i] = stream->avOutFrame->linesize[i];
		}
		/* get pixFormat produced by MJPEG decoder */
		pixFormat = stream->avContext->pix_fmt;
	}
	else
#endif
	{
		/* raw input: build plane pointers/strides directly over srcData */
		pixFormat = ecamToAVPixFormat(inputFormat);
		if (av_image_fill_linesizes(srcLineSizes, pixFormat, (int)size.width) < 0)
		{
			WLog_ERR(TAG, "av_image_fill_linesizes failed");
			return FALSE;
		}
		if (av_image_fill_pointers(srcSlice, pixFormat, (int)size.height,
		                           WINPR_CAST_CONST_PTR_AWAY(srcData, BYTE*), srcLineSizes) < 0)
		{
			WLog_ERR(TAG, "av_image_fill_pointers failed");
			return FALSE;
		}
	}
	/* get buffers for YUV420P or NV12 */
	if (h264_get_yuv_buffer(stream->h264, 0, size.width, size.height, yuvData, yuvLineSizes) < 0)
		return FALSE;
	/* convert from source format to YUV420P or NV12 */
	if (!ecam_init_sws_context(stream, pixFormat))
		return FALSE;
	const BYTE* cSrcSlice[4] = { srcSlice[0], srcSlice[1], srcSlice[2], srcSlice[3] };
	if (sws_scale(stream->sws, cSrcSlice, srcLineSizes, 0, (int)size.height, yuvData,
	              (int*)yuvLineSizes) <= 0)
		return FALSE;
	/* encode from YUV420P or NV12 to H264 */
	if (h264_compress(stream->h264, ppDstData, &dstSize) < 0)
		return FALSE;
	*pDstSize = dstSize;
	return TRUE;
}
/**
 * Function description
 *
 * Releases all H264 encoder resources of the stream: the scaler, the
 * optional MJPEG decoder objects, the passthrough demux buffer and the
 * H264 context. Freed pointers are reset to nullptr.
 */
static void ecam_encoder_context_free_h264(CameraDeviceStream* stream)
{
	WINPR_ASSERT(stream);

	ecam_sws_free(stream);

#if defined(WITH_INPUT_FORMAT_MJPG)
	if (stream->avOutFrame)
		av_frame_free(&stream->avOutFrame); /* clears the pointer */

	if (stream->avInputPkt)
	{
		/* the packet only borrowed the caller's buffer: detach before freeing */
		stream->avInputPkt->data = nullptr;
		stream->avInputPkt->size = 0;
		av_packet_free(&stream->avInputPkt); /* clears the pointer */
	}

	if (stream->avContext)
		avcodec_free_context(&stream->avContext); /* clears the pointer */
#endif

#if defined(WITH_INPUT_FORMAT_H264)
	free(stream->h264Frame); /* free(NULL) is a no-op */
	stream->h264Frame = nullptr;
#endif

	if (stream->h264)
	{
		h264_context_free(stream->h264);
		stream->h264 = nullptr;
	}
}
#if defined(WITH_INPUT_FORMAT_MJPG)
/**
 * Function description
 *
 * Sets up the FFmpeg MJPEG decoder for this stream: codec context sized to
 * the current media type, plus the reusable input packet and output frame.
 * On failure, already-allocated objects are released later by
 * ecam_encoder_context_free_h264.
 *
 * @return success/failure
 */
static BOOL ecam_init_mjpeg_decoder(CameraDeviceStream* stream)
{
	WINPR_ASSERT(stream);

	const AVCodec* codec = avcodec_find_decoder(AV_CODEC_ID_MJPEG);
	if (!codec)
	{
		WLog_ERR(TAG, "avcodec_find_decoder failed to find MJPEG codec");
		return FALSE;
	}

	stream->avContext = avcodec_alloc_context3(codec);
	if (!stream->avContext)
	{
		WLog_ERR(TAG, "avcodec_alloc_context3 failed");
		return FALSE;
	}

	stream->avContext->width = WINPR_ASSERTING_INT_CAST(int, stream->currMediaType.Width);
	stream->avContext->height = WINPR_ASSERTING_INT_CAST(int, stream->currMediaType.Height);
	/* AV_EF_EXPLODE flag is to abort decoding on minor error detection,
	 * return error, so we can skip corrupted frames, if any */
	stream->avContext->err_recognition |= AV_EF_EXPLODE;

	if (avcodec_open2(stream->avContext, codec, nullptr) < 0)
	{
		WLog_ERR(TAG, "avcodec_open2 failed");
		return FALSE;
	}

	stream->avInputPkt = av_packet_alloc();
	if (!stream->avInputPkt)
	{
		WLog_ERR(TAG, "av_packet_alloc failed");
		return FALSE;
	}

	stream->avOutFrame = av_frame_alloc();
	if (stream->avOutFrame)
		return TRUE;

	WLog_ERR(TAG, "av_frame_alloc failed");
	return FALSE;
}
#endif
/**
 * Function description
 *
 * Prepares the stream for H264 output. For MJPG_H264 passthrough only a
 * demux buffer is allocated; otherwise an H264 encoder context is created
 * and configured (real-time usage, CQP rate control, optional hardware
 * acceleration), plus an MJPEG decoder when the input is MJPG.
 *
 * @return success/failure
 */
static BOOL ecam_encoder_context_init_h264(CameraDeviceStream* stream)
{
	WINPR_ASSERT(stream);
#if defined(WITH_INPUT_FORMAT_H264)
	if (streamInputFormat(stream) == CAM_MEDIA_FORMAT_MJPG_H264)
	{
		stream->h264FrameMaxSize = 1ULL * stream->currMediaType.Width *
		                           stream->currMediaType.Height; /* 1 byte per pixel */
		stream->h264Frame = (BYTE*)calloc(stream->h264FrameMaxSize, sizeof(BYTE));
		/* an unchecked allocation here would make demux_uvcH264 write to NULL */
		if (!stream->h264Frame)
		{
			WLog_ERR(TAG, "calloc failed");
			return FALSE;
		}
		return TRUE; /* encoder not needed */
	}
#endif
	/* the media type comes from the peer: reject a zero denominator before
	 * the frame rate division below */
	if (stream->currMediaType.FrameRateDenominator == 0)
	{
		WLog_ERR(TAG, "Invalid FrameRateDenominator 0");
		return FALSE;
	}
	if (!stream->h264)
		stream->h264 = h264_context_new(TRUE);
	if (!stream->h264)
	{
		WLog_ERR(TAG, "h264_context_new failed");
		return FALSE;
	}
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_USAGETYPE,
	                             H264_CAMERA_VIDEO_REAL_TIME))
		goto fail;
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_FRAMERATE,
	                             stream->currMediaType.FrameRateNumerator /
	                                 stream->currMediaType.FrameRateDenominator))
		goto fail;
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_BITRATE,
	                             h264_get_max_bitrate(stream->currMediaType.Height)))
		goto fail;
	/* Using CQP mode for rate control. It produces more comparable quality
	 * between VAAPI and software encoding than VBR mode
	 */
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_RATECONTROL,
	                             H264_RATECONTROL_CQP))
		goto fail;
	/* Using 26 as CQP value. Lower values will produce better quality but
	 * higher bitrate; higher values - lower bitrate but degraded quality
	 */
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_QP, 26))
		goto fail;
	/* Requesting hardware acceleration before calling h264_context_reset */
	if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_HW_ACCEL, TRUE))
		goto fail;
	if (!h264_context_reset(stream->h264, stream->currMediaType.Width,
	                        stream->currMediaType.Height))
	{
		WLog_ERR(TAG, "h264_context_reset failed");
		goto fail;
	}
#if defined(WITH_INPUT_FORMAT_MJPG)
	if (streamInputFormat(stream) == CAM_MEDIA_FORMAT_MJPG && !ecam_init_mjpeg_decoder(stream))
		goto fail;
#endif
	return TRUE;
fail:
	ecam_encoder_context_free_h264(stream);
	return FALSE;
}
/**
 * Function description
 *
 * Dispatches encoder-context initialization based on the stream's output
 * format. Only H264 output is currently supported.
 *
 * @return success/failure
 */
BOOL ecam_encoder_context_init(CameraDeviceStream* stream)
{
	const CAM_MEDIA_FORMAT format = streamOutputFormat(stream);

	if (format == CAM_MEDIA_FORMAT_H264)
		return ecam_encoder_context_init_h264(stream);

	WLog_ERR(TAG, "Unsupported output format %u", format);
	return FALSE;
}
/**
 * Function description
 *
 * Releases the encoder context matching the stream's output format.
 * Fails (FALSE) for unsupported output formats.
 *
 * @return success/failure
 */
BOOL ecam_encoder_context_free(CameraDeviceStream* stream)
{
	const CAM_MEDIA_FORMAT format = streamOutputFormat(stream);

	if (format != CAM_MEDIA_FORMAT_H264)
		return FALSE;

	ecam_encoder_context_free_h264(stream);
	return TRUE;
}
/**
 * Function description
 *
 * Compresses one captured sample into the stream's output format,
 * delegating to the H264 path (the only supported output format).
 *
 * @return success/failure
 */
BOOL ecam_encoder_compress(CameraDeviceStream* stream, const BYTE* srcData, size_t srcSize,
                           BYTE** ppDstData, size_t* pDstSize)
{
	const CAM_MEDIA_FORMAT format = streamOutputFormat(stream);

	if (format == CAM_MEDIA_FORMAT_H264)
		return ecam_encoder_compress_h264(stream, srcData, srcSize, ppDstData, pDstSize);

	WLog_ERR(TAG, "Unsupported output format %u", format);
	return FALSE;
}

View File

@@ -0,0 +1,34 @@
# FreeRDP: A Remote Desktop Protocol Implementation
# FreeRDP cmake build script
#
# Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build the Video4Linux capture subsystem of the rdpecam channel client.
# Only compiled when the parent CMakeLists detected Video4Linux (WITH_V4L).
if(WITH_V4L)
  define_channel_client_subsystem("rdpecam" "v4l" "")

  # libusb is required to walk USB descriptors when probing for the
  # UVC H264 extension unit (see uvc_h264.c)
  find_package(libusb-1.0 REQUIRED)
  freerdp_client_pc_add_requires_private("libusb-1.0")
  include_directories(SYSTEM ${LIBUSB_1_INCLUDE_DIRS})

  set(${MODULE_PREFIX}_SRCS camera_v4l.c uvc_h264.c)
  set(${MODULE_PREFIX}_LIBS winpr freerdp ${LIBUSB_1_LIBRARIES})

  # parent directory provides camera.h, shared with the main channel sources
  include_directories(..)

  add_channel_client_subsystem_library(${MODULE_PREFIX} ${MODULE_NAME} ${CHANNEL_NAME} "" TRUE "")
endif()

View File

@@ -0,0 +1,834 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, V4L Interface
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
/* v4l includes */
#include <linux/videodev2.h>
#include "camera_v4l.h"
#include "uvc_h264.h"
#define TAG CHANNELS_TAG("rdpecam-v4l.client")
#define CAM_V4L2_BUFFERS_COUNT 4
#define CAM_V4L2_CAPTURE_THREAD_SLEEP_MS 1000
#define CAM_V4L2_FRAMERATE_NUMERATOR_DEFAULT 30
#define CAM_V4L2_FRAMERATE_DENOMINATOR_DEFAULT 1
/* V4L HAL instance. iHal must stay the first member: the plugin hands back
 * an ICamHal* which is cast to CamV4lHal* throughout this file. */
typedef struct
{
	ICamHal iHal;

	wHashTable* streams; /* Index: deviceId, Value: CamV4lStream */
} CamV4lHal;

/* forward declarations */
static CamV4lStream* cam_v4l_stream_create(const char* deviceId, size_t streamIndex);
static void cam_v4l_stream_free(void* obj);
static void cam_v4l_stream_close_device(CamV4lStream* stream);
static CAM_ERROR_CODE cam_v4l_stream_stop(CamV4lStream* stream);
/**
 * Function description
 *
 * Renders a V4L2 fourcc code (little-endian packed) as a printable
 * 4-character string in the caller-provided buffer.
 *
 * @return \0-terminated fourcc string, or NULL if the buffer is too small
 */
static const char* cam_v4l_get_fourcc_str(unsigned int fourcc, char* buffer, size_t size)
{
	if (size < 5)
		return NULL;

	/* least significant byte is the first character */
	for (size_t x = 0; x < 4; x++)
		buffer[x] = (char)((fourcc >> (8 * x)) & 0xFF);
	buffer[4] = '\0';

	return buffer;
}
/**
 * Function description
 *
 * Translates an MS-RDPECAM media format into the matching V4L2 pixel format.
 *
 * @return one of V4L2_PIX_FMT, or 0 for unsupported formats
 */
static UINT32 ecamToV4L2PixFormat(CAM_MEDIA_FORMAT ecamFormat)
{
	static const struct
	{
		CAM_MEDIA_FORMAT ecam;
		UINT32 v4l2;
	} map[] = {
		{ CAM_MEDIA_FORMAT_H264, V4L2_PIX_FMT_H264 },
		{ CAM_MEDIA_FORMAT_MJPG, V4L2_PIX_FMT_MJPEG },
		{ CAM_MEDIA_FORMAT_YUY2, V4L2_PIX_FMT_YUYV },
		{ CAM_MEDIA_FORMAT_NV12, V4L2_PIX_FMT_NV12 },
		{ CAM_MEDIA_FORMAT_I420, V4L2_PIX_FMT_YUV420 },
		{ CAM_MEDIA_FORMAT_RGB24, V4L2_PIX_FMT_RGB24 },
		{ CAM_MEDIA_FORMAT_RGB32, V4L2_PIX_FMT_RGB32 },
	};

	for (size_t x = 0; x < ARRAYSIZE(map); x++)
	{
		if (map[x].ecam == ecamFormat)
			return map[x].v4l2;
	}

	WLog_ERR(TAG, "Unsupported CAM_MEDIA_FORMAT %u", ecamFormat);
	return 0;
}
/**
 * Function description
 *
 * Enumerates the device's capture formats via VIDIOC_ENUM_FMT and checks
 * whether the requested V4L2 pixel format is among them.
 *
 * @return TRUE or FALSE
 */
static BOOL cam_v4l_format_supported(int fd, UINT32 format)
{
	struct v4l2_fmtdesc fmtdesc = WINPR_C_ARRAY_INIT;
	fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmtdesc.index = 0;

	while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0)
	{
		if (fmtdesc.pixelformat == format)
			return TRUE;
		fmtdesc.index++;
	}

	return FALSE;
}
/**
 * Function description
 *
 * Opens a V4L2 capture device either directly (deviceId is a /dev/videoN
 * path) or by scanning /dev/video0../dev/video63 for a capture-capable node
 * whose bus_info string matches deviceId.
 *
 * @return file descriptor on success, -1 on failure
 */
static int cam_v4l_open_device(const char* deviceId, int flags)
{
	char device[20] = WINPR_C_ARRAY_INIT;
	struct v4l2_capability cap = WINPR_C_ARRAY_INIT;

	if (!deviceId)
		return -1;

	/* deviceId is already a device node path: open it as-is */
	if (0 == strncmp(deviceId, "/dev/video", 10))
		return open(deviceId, flags);

	for (UINT n = 0; n < 64; n++)
	{
		(void)_snprintf(device, sizeof(device), "/dev/video%" PRIu32, n);
		const int fd = open(device, flags);
		if (fd == -1)
			continue;

		/* query device capabilities and make sure this is a video capture device */
		if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE))
		{
			close(fd);
			continue;
		}

		if (cap.bus_info[0] != 0 && 0 == strcmp((const char*)cap.bus_info, deviceId))
			return fd;

		close(fd);
	}

	/* BUGFIX: previously the (already closed) descriptor from the last loop
	 * iteration could be returned here; always report failure explicitly */
	return -1;
}
/* Activation hook: V4L devices need no explicit activation, so this
 * always reports success. */
static BOOL cam_v4l_activate(ICamHal* ihal, const char* deviceId, CAM_ERROR_CODE* errorCode)
{
	WINPR_UNUSED(deviceId);
	WINPR_UNUSED(ihal);

	*errorCode = CAM_ERROR_CODE_None;
	return TRUE;
}
/* Deactivation hook: nothing to undo for V4L devices, so this
 * always reports success. */
static BOOL cam_v4l_deactivate(ICamHal* ihal, const char* deviceId, CAM_ERROR_CODE* errorCode)
{
	WINPR_UNUSED(deviceId);
	WINPR_UNUSED(ihal);

	*errorCode = CAM_ERROR_CODE_None;
	return TRUE;
}
/**
 * Function description
 *
 * Fills mediaTypes with every discrete frame size (and the highest frame
 * rate per size) of the first entry from supportedFormats that the device
 * actually offers. Also lazily creates and caches the per-device stream
 * object in the HAL's hash table.
 *
 * @return -1 if error, otherwise index of supportedFormats array and mediaTypes/nMediaTypes filled
 * in
 */
static INT16 cam_v4l_get_media_type_descriptions(ICamHal* ihal, const char* deviceId,
                                                 size_t streamIndex,
                                                 const CAM_MEDIA_FORMAT_INFO* supportedFormats,
                                                 size_t nSupportedFormats,
                                                 CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes,
                                                 size_t* nMediaTypes)
{
	CamV4lHal* hal = (CamV4lHal*)ihal;
	size_t maxMediaTypes = *nMediaTypes; /* capacity of the caller-provided array */
	size_t nTypes = 0;
	BOOL formatFound = FALSE;

	/* create and cache the per-device stream object on first use */
	CamV4lStream* stream = (CamV4lStream*)HashTable_GetItemValue(hal->streams, deviceId);
	if (!stream)
	{
		stream = cam_v4l_stream_create(deviceId, streamIndex);
		if (!stream)
			/* NOTE(review): returns a positive CAM_ERROR_CODE although the documented
			 * failure value is -1 — confirm callers treat any non-index value as error */
			return CAM_ERROR_CODE_OutOfMemory;

		if (!HashTable_Insert(hal->streams, deviceId, stream))
		{
			cam_v4l_stream_free(stream);
			return CAM_ERROR_CODE_UnexpectedError;
		}
	}

	int fd = cam_v4l_open_device(deviceId, O_RDONLY);
	if (fd == -1)
	{
		WLog_ERR(TAG, "Unable to open device %s", deviceId);
		return -1;
	}

	size_t formatIndex = 0;
	for (; formatIndex < nSupportedFormats; formatIndex++)
	{
		UINT32 pixelFormat = 0;

		if (supportedFormats[formatIndex].inputFormat == CAM_MEDIA_FORMAT_MJPG_H264)
		{
			/* H264 muxed into the MJPG container needs a UVC H264 extension unit */
			if (stream->h264UnitId > 0)
				pixelFormat = V4L2_PIX_FMT_MJPEG;
			else
				continue; /* not supported */
		}
		else
		{
			pixelFormat = ecamToV4L2PixFormat(supportedFormats[formatIndex].inputFormat);
		}
		WINPR_ASSERT(pixelFormat != 0);

		struct v4l2_frmsizeenum frmsize = WINPR_C_ARRAY_INIT;

		if (!cam_v4l_format_supported(fd, pixelFormat))
			continue;

		frmsize.pixel_format = pixelFormat;
		for (frmsize.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0; frmsize.index++)
		{
			struct v4l2_frmivalenum frmival = WINPR_C_ARRAY_INIT;

			if (frmsize.type != V4L2_FRMSIZE_TYPE_DISCRETE)
				break; /* don't support size types other than discrete */

			formatFound = TRUE;
			mediaTypes->Width = frmsize.discrete.width;
			mediaTypes->Height = frmsize.discrete.height;
			mediaTypes->Format = supportedFormats[formatIndex].inputFormat;

			/* query frame rate (1st is highest fps supported) */
			frmival.index = 0;
			frmival.pixel_format = pixelFormat;
			frmival.width = frmsize.discrete.width;
			frmival.height = frmsize.discrete.height;

			if (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) == 0 &&
			    frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
			{
				/* inverse of a fraction */
				mediaTypes->FrameRateNumerator = frmival.discrete.denominator;
				mediaTypes->FrameRateDenominator = frmival.discrete.numerator;
			}
			else
			{
				WLog_DBG(TAG, "VIDIOC_ENUM_FRAMEINTERVALS failed, using default framerate");
				mediaTypes->FrameRateNumerator = CAM_V4L2_FRAMERATE_NUMERATOR_DEFAULT;
				mediaTypes->FrameRateDenominator = CAM_V4L2_FRAMERATE_DENOMINATOR_DEFAULT;
			}

			mediaTypes->PixelAspectRatioNumerator = mediaTypes->PixelAspectRatioDenominator = 1;

			char fourccstr[5] = WINPR_C_ARRAY_INIT;
			WLog_DBG(TAG, "Camera format: %s, width: %u, height: %u, fps: %u/%u",
			         cam_v4l_get_fourcc_str(pixelFormat, fourccstr, ARRAYSIZE(fourccstr)),
			         mediaTypes->Width, mediaTypes->Height, mediaTypes->FrameRateNumerator,
			         mediaTypes->FrameRateDenominator);

			mediaTypes++;
			nTypes++;

			if (nTypes == maxMediaTypes)
			{
				WLog_ERR(TAG, "Media types reached buffer maximum %" PRIuz "", maxMediaTypes);
				goto error;
			}
		}
		if (formatFound)
		{
			/* we are interested in 1st supported format only, with all supported sizes */
			break;
		}
	}

error:
	*nMediaTypes = nTypes;
	close(fd);
	if (formatIndex > INT16_MAX)
		return -1;
	return (INT16)formatIndex;
}
/**
 * Function description
 *
 * Scans /dev/video0../dev/video63 and reports every node that advertises
 * V4L2_CAP_VIDEO_CAPTURE through the enumeration callback. bus_info is
 * preferred over the node path as a stable device identifier.
 *
 * @return number of video capture devices
 */
static UINT cam_v4l_enumerate(WINPR_ATTR_UNUSED ICamHal* ihal, ICamHalEnumCallback callback,
                              CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel)
{
	UINT count = 0;

	for (UINT n = 0; n < 64; n++)
	{
		char device[20] = WINPR_C_ARRAY_INIT;
		struct v4l2_capability cap = WINPR_C_ARRAY_INIT;
		(void)_snprintf(device, sizeof(device), "/dev/video%" PRIu32, n);
		int fd = open(device, O_RDONLY);
		if (fd == -1)
			continue;

		/* query device capabilities and make sure this is a video capture device */
		if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE))
		{
			close(fd);
			continue;
		}
		count++;

		const char* deviceName = (char*)cap.card;
		const char* deviceId = device;
		if (cap.bus_info[0] != 0) /* may not be available in all drivers */
			deviceId = (char*)cap.bus_info;

		IFCALL(callback, ecam, hchannel, deviceId, deviceName);
		close(fd);
	}
	return count;
}
/* Unmaps every successfully mmap'ed capture buffer and releases the buffer
 * array. Entries that were never mapped have length 0 and are skipped. */
static void cam_v4l_stream_free_buffers(CamV4lStream* stream)
{
	if (!stream)
		return;

	CamV4lBuffer* buffers = stream->buffers;
	if (!buffers)
		return;

	for (size_t x = 0; x < stream->nBuffers; x++)
	{
		CamV4lBuffer* cur = &buffers[x];
		if ((cur->length != 0) && (cur->start != MAP_FAILED))
			munmap(cur->start, cur->length);
	}

	free(buffers);
	stream->buffers = nullptr;
	stream->nBuffers = 0;
}
/**
 * Function description
 *
 * Requests CAM_V4L2_BUFFERS_COUNT mmap capture buffers from the driver,
 * memory-maps each one and queues it for capture. On any failure every
 * mapping made so far is undone via cam_v4l_stream_free_buffers.
 *
 * @return 0 on failure, otherwise allocated buffer size
 */
static size_t cam_v4l_stream_alloc_buffers(CamV4lStream* stream)
{
	struct v4l2_requestbuffers rbuffer = WINPR_C_ARRAY_INIT;

	rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	rbuffer.memory = V4L2_MEMORY_MMAP;
	rbuffer.count = CAM_V4L2_BUFFERS_COUNT;

	/* the driver decides the final count; zero means nothing was granted */
	if (ioctl(stream->fd, VIDIOC_REQBUFS, &rbuffer) < 0 || rbuffer.count == 0)
	{
		char buffer[64] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "Failure in VIDIOC_REQBUFS, errno %s [%d], count %u",
		         winpr_strerror(errno, buffer, sizeof(buffer)), errno, rbuffer.count);
		return 0;
	}
	stream->nBuffers = rbuffer.count;

	/* Map the buffers */
	stream->buffers = (CamV4lBuffer*)calloc(rbuffer.count, sizeof(CamV4lBuffer));
	if (!stream->buffers)
	{
		WLog_ERR(TAG, "Failure in calloc");
		return 0;
	}

	for (unsigned int i = 0; i < rbuffer.count; i++)
	{
		struct v4l2_buffer vbuffer = WINPR_C_ARRAY_INIT;

		vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		vbuffer.memory = V4L2_MEMORY_MMAP;
		vbuffer.index = i;

		if (ioctl(stream->fd, VIDIOC_QUERYBUF, &vbuffer) < 0)
		{
			char buffer[64] = WINPR_C_ARRAY_INIT;
			WLog_ERR(TAG, "Failure in VIDIOC_QUERYBUF, errno %s [%d]",
			         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
			cam_v4l_stream_free_buffers(stream);
			return 0;
		}

		stream->buffers[i].start = mmap(nullptr, vbuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
		                                stream->fd, vbuffer.m.offset);
		if (MAP_FAILED == stream->buffers[i].start)
		{
			char buffer[64] = WINPR_C_ARRAY_INIT;
			WLog_ERR(TAG, "Failure in mmap, errno %s [%d]",
			         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
			cam_v4l_stream_free_buffers(stream);
			return 0;
		}
		/* length is recorded only after a successful mmap so free_buffers can
		 * distinguish mapped entries from failed/unmapped ones */
		stream->buffers[i].length = vbuffer.length;

		WLog_DBG(TAG, "Buffer %u mapped, size: %u", i, vbuffer.length);

		/* queue the freshly mapped buffer so capture can fill it */
		if (ioctl(stream->fd, VIDIOC_QBUF, &vbuffer) < 0)
		{
			char buffer[64] = WINPR_C_ARRAY_INIT;
			WLog_ERR(TAG, "Failure in VIDIOC_QBUF, errno %s [%d]",
			         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
			cam_v4l_stream_free_buffers(stream);
			return 0;
		}
	}
	return stream->buffers[0].length;
}
/**
 * Function description
 *
 * Capture thread body: waits (bounded poll) for the device fd to become
 * readable, then — under the stream lock and only while streaming — drains
 * all filled buffers, hands each sample to sampleCallback and re-queues the
 * buffer. Terminates once stream->streaming is cleared.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static DWORD WINAPI cam_v4l_stream_capture_thread(LPVOID param)
{
	CamV4lStream* stream = (CamV4lStream*)param;
	WINPR_ASSERT(stream);

	int fd = stream->fd;

	do
	{
		int retVal = 0;
		struct pollfd pfd = WINPR_C_ARRAY_INIT;
		pfd.fd = fd;
		pfd.events = POLLIN;

		/* bounded wait so the streaming flag is rechecked at least once a second */
		retVal = poll(&pfd, 1, CAM_V4L2_CAPTURE_THREAD_SLEEP_MS);
		if (retVal == 0)
		{
			/* poll timed out */
			continue;
		}
		else if (retVal < 0)
		{
			char buffer[64] = WINPR_C_ARRAY_INIT;
			WLog_DBG(TAG, "Failure in poll, errno %s [%d]",
			         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
			Sleep(CAM_V4L2_CAPTURE_THREAD_SLEEP_MS); /* trying to recover */
			continue;
		}
		else if (!(pfd.revents & POLLIN))
		{
			WLog_DBG(TAG, "poll reported non-read event %d", pfd.revents);
			Sleep(CAM_V4L2_CAPTURE_THREAD_SLEEP_MS); /* also trying to recover */
			continue;
		}

		EnterCriticalSection(&stream->lock);
		if (stream->streaming)
		{
			struct v4l2_buffer buf = WINPR_C_ARRAY_INIT;
			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_MMAP;

			/* dequeue buffers until empty; the fd is opened O_NONBLOCK
			 * (see cam_v4l_stream_start), so DQBUF returns -1 once drained */
			while (ioctl(fd, VIDIOC_DQBUF, &buf) != -1)
			{
				const UINT error =
				    stream->sampleCallback(stream->dev, stream->streamIndex,
				                           stream->buffers[buf.index].start, buf.bytesused);
				if (error != CHANNEL_RC_OK)
					WLog_ERR(TAG, "Failure in sampleCallback: %" PRIu32, error);

				/* enqueue buffer back */
				if (ioctl(fd, VIDIOC_QBUF, &buf) == -1)
				{
					char buffer[64] = WINPR_C_ARRAY_INIT;
					WLog_ERR(TAG, "Failure in VIDIOC_QBUF, errno %s [%d]",
					         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
				}
			}
		}
		LeaveCriticalSection(&stream->lock);

		/* NOTE(review): streaming is read without holding the lock; shutdown
		 * relies on the bounded poll above — confirm this is intentional */
	} while (stream->streaming);

	return CHANNEL_RC_OK;
}
/* Closes the stream's V4L2 device descriptor, if open, and marks it closed. */
void cam_v4l_stream_close_device(CamV4lStream* stream)
{
	if (stream->fd == -1)
		return;

	close(stream->fd);
	stream->fd = -1;
}
/**
 * Function description
 *
 * Allocates and initializes a per-device stream object: device closed
 * (fd == -1), UVC H264 extension unit probed, lock initialized.
 *
 * @return Null on failure, otherwise pointer to new CamV4lStream
 */
WINPR_ATTR_MALLOC(cam_v4l_stream_free, 1)
CamV4lStream* cam_v4l_stream_create(const char* deviceId, size_t streamIndex)
{
	CamV4lStream* stream = calloc(1, sizeof(CamV4lStream));
	if (!stream)
	{
		WLog_ERR(TAG, "Failure in calloc");
		return nullptr;
	}
	stream->streamIndex = streamIndex;
	stream->fd = -1;

	/* 0 if the camera has no UVC H264 extension unit */
	stream->h264UnitId = get_uvc_h624_unit_id(deviceId);

	if (!InitializeCriticalSectionEx(&stream->lock, 0, 0))
	{
		/* BUGFIX: previously logged "Failure in calloc" for this error path */
		WLog_ERR(TAG, "Failure in InitializeCriticalSectionEx");
		free(stream);
		return nullptr;
	}
	return stream;
}
/**
 * Function description
 *
 * Stops a running stream: clears the streaming flag, joins the capture
 * thread, then (under the lock) issues VIDIOC_STREAMOFF, unmaps the buffers
 * and closes the device. Safe to call when not streaming.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
CAM_ERROR_CODE cam_v4l_stream_stop(CamV4lStream* stream)
{
	if (!stream || !stream->streaming)
		return CAM_ERROR_CODE_None;

	stream->streaming = FALSE; /* this will terminate capture thread */

	if (stream->captureThread)
	{
		(void)WaitForSingleObject(stream->captureThread, INFINITE);
		(void)CloseHandle(stream->captureThread);
		stream->captureThread = nullptr;
	}

	EnterCriticalSection(&stream->lock);

	/* stop streaming; failure is logged but teardown continues regardless */
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(stream->fd, VIDIOC_STREAMOFF, &type) < 0)
	{
		char buffer[64] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "Failure in VIDIOC_STREAMOFF, errno %s [%d]",
		         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
	}

	cam_v4l_stream_free_buffers(stream);
	cam_v4l_stream_close_device(stream);
	LeaveCriticalSection(&stream->lock);
	return CAM_ERROR_CODE_None;
}
/* Starts capture on a previously enumerated device: opens the fd
 * (non-blocking), configures pixel format/resolution — or the UVC H264
 * muxed mode —, applies the frame rate on a best-effort basis, allocates
 * mmap buffers, starts V4L2 streaming and spawns the capture thread.
 * Returns CAM_ERROR_CODE_None on success. */
static CAM_ERROR_CODE cam_v4l_stream_start(ICamHal* ihal, CameraDevice* dev, size_t streamIndex,
                                           const CAM_MEDIA_TYPE_DESCRIPTION* mediaType,
                                           ICamHalSampleCapturedCallback callback)
{
	CamV4lHal* hal = (CamV4lHal*)ihal;
	WINPR_ASSERT(hal);

	/* stream object is created during media type enumeration */
	CamV4lStream* stream = (CamV4lStream*)HashTable_GetItemValue(hal->streams, dev->deviceId);
	if (!stream)
	{
		WLog_ERR(TAG, "Unable to find stream, device %s, streamIndex %" PRIuz, dev->deviceId,
		         streamIndex);
		return CAM_ERROR_CODE_UnexpectedError;
	}

	if (stream->streaming)
	{
		WLog_ERR(TAG, "Streaming already in progress, device %s, streamIndex %" PRIuz,
		         dev->deviceId, streamIndex);
		return CAM_ERROR_CODE_UnexpectedError;
	}

	stream->dev = dev;
	stream->sampleCallback = callback;

	/* non-blocking so the capture thread can drain DQBUF until it fails */
	if ((stream->fd = cam_v4l_open_device(dev->deviceId, O_RDWR | O_NONBLOCK)) == -1)
	{
		WLog_ERR(TAG, "Unable to open device %s", dev->deviceId);
		return CAM_ERROR_CODE_UnexpectedError;
	}

	struct v4l2_format video_fmt = WINPR_C_ARRAY_INIT;
	video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	UINT32 pixelFormat = 0;
	if (mediaType->Format == CAM_MEDIA_FORMAT_MJPG_H264)
	{
		/* program the UVC extension unit before configuring the container */
		if (!set_h264_muxed_format(stream, mediaType))
		{
			WLog_ERR(TAG, "Failure to set H264 muxed format");
			cam_v4l_stream_close_device(stream);
			return CAM_ERROR_CODE_UnexpectedError;
		}

		/* setup container stream format */
		pixelFormat = V4L2_PIX_FMT_MJPEG;

		/* limit container stream resolution to save USB bandwidth - required */
		video_fmt.fmt.pix.width = 640;
		video_fmt.fmt.pix.height = 480;
	}
	else
	{
		pixelFormat = ecamToV4L2PixFormat(mediaType->Format);
		video_fmt.fmt.pix.width = mediaType->Width;
		video_fmt.fmt.pix.height = mediaType->Height;
	}

	if (pixelFormat == 0)
	{
		cam_v4l_stream_close_device(stream);
		return CAM_ERROR_CODE_InvalidMediaType;
	}
	video_fmt.fmt.pix.pixelformat = pixelFormat;

	/* set format and frame size */
	if (ioctl(stream->fd, VIDIOC_S_FMT, &video_fmt) < 0)
	{
		char buffer[64] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "Failure in VIDIOC_S_FMT, errno %s [%d]",
		         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
		cam_v4l_stream_close_device(stream);
		return CAM_ERROR_CODE_InvalidMediaType;
	}

	/* trying to set frame rate, if driver supports it */
	struct v4l2_streamparm sp1 = WINPR_C_ARRAY_INIT;
	struct v4l2_streamparm sp2 = WINPR_C_ARRAY_INIT;
	sp1.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(stream->fd, VIDIOC_G_PARM, &sp1) < 0 ||
	    !(sp1.parm.capture.capability & V4L2_CAP_TIMEPERFRAME))
	{
		WLog_INFO(TAG, "Driver doesn't support setting framerate");
	}
	else
	{
		sp2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		/* inverse of a fraction */
		sp2.parm.capture.timeperframe.numerator = mediaType->FrameRateDenominator;
		sp2.parm.capture.timeperframe.denominator = mediaType->FrameRateNumerator;

		/* non-fatal: continue with the driver's default rate on failure */
		if (ioctl(stream->fd, VIDIOC_S_PARM, &sp2) < 0)
		{
			char buffer[64] = WINPR_C_ARRAY_INIT;
			WLog_INFO(TAG, "Failed to set the framerate, errno %s [%d]",
			          winpr_strerror(errno, buffer, sizeof(buffer)), errno);
		}
	}

	size_t maxSample = cam_v4l_stream_alloc_buffers(stream);
	if (maxSample == 0)
	{
		WLog_ERR(TAG, "Failure to allocate video buffers");
		cam_v4l_stream_close_device(stream);
		return CAM_ERROR_CODE_OutOfMemory;
	}

	/* set before STREAMON so the error paths below can reuse stream_stop */
	stream->streaming = TRUE;

	/* start streaming */
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(stream->fd, VIDIOC_STREAMON, &type) < 0)
	{
		char buffer[64] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "Failure in VIDIOC_STREAMON, errno %s [%d]",
		         winpr_strerror(errno, buffer, sizeof(buffer)), errno);
		cam_v4l_stream_stop(stream);
		return CAM_ERROR_CODE_UnexpectedError;
	}

	stream->captureThread =
	    CreateThread(nullptr, 0, cam_v4l_stream_capture_thread, stream, 0, nullptr);
	if (!stream->captureThread)
	{
		WLog_ERR(TAG, "CreateThread failure");
		cam_v4l_stream_stop(stream);
		return CAM_ERROR_CODE_OutOfMemory;
	}

	char fourccstr[16] = WINPR_C_ARRAY_INIT;
	if (mediaType->Format == CAM_MEDIA_FORMAT_MJPG_H264)
		strncpy(fourccstr, "H264 muxed", ARRAYSIZE(fourccstr) - 1);
	else
		cam_v4l_get_fourcc_str(pixelFormat, fourccstr, ARRAYSIZE(fourccstr));
	WLog_INFO(TAG, "Camera format: %s, width: %u, height: %u, fps: %u/%u", fourccstr,
	          mediaType->Width, mediaType->Height, mediaType->FrameRateNumerator,
	          mediaType->FrameRateDenominator);
	return CAM_ERROR_CODE_None;
}
/**
 * Function description
 *
 * Looks up the stream for the given deviceId and stops it.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static CAM_ERROR_CODE cam_v4l_stream_stop_by_device_id(ICamHal* ihal, const char* deviceId,
                                                       WINPR_ATTR_UNUSED size_t streamIndex)
{
	CamV4lHal* hal = (CamV4lHal*)ihal;
	CamV4lStream* stream = (CamV4lStream*)HashTable_GetItemValue(hal->streams, deviceId);

	return stream ? cam_v4l_stream_stop(stream) : CAM_ERROR_CODE_NotInitialized;
}
/**
 * Function description
 *
 * OBJECT_FREE_FN for streams hash table value: stops capture if needed,
 * tears down the lock and releases the stream object.
 *
 */
void cam_v4l_stream_free(void* obj)
{
	CamV4lStream* stream = (CamV4lStream*)obj;
	if (stream)
	{
		cam_v4l_stream_stop(stream);
		DeleteCriticalSection(&stream->lock);
		free(stream);
	}
}
/**
 * Function description
 *
 * Frees the HAL instance: releasing the stream map in turn stops and frees
 * every cached stream via its value-free callback.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
static CAM_ERROR_CODE cam_v4l_free(ICamHal* ihal)
{
	CamV4lHal* hal = (CamV4lHal*)ihal;
	if (!hal)
		return CAM_ERROR_CODE_NotInitialized;

	HashTable_Free(hal->streams);
	free(hal);
	return CAM_ERROR_CODE_None;
}
/**
 * Function description
 *
 * Subsystem entry point: allocates the V4L HAL, wires up the ICamHal vtable,
 * creates the deviceId -> stream hash table and registers the HAL with the
 * plugin. On any failure the HAL (and map) are released before returning.
 *
 * @return 0 on success, otherwise a Win32 error code
 */
FREERDP_ENTRY_POINT(UINT VCAPITYPE v4l_freerdp_rdpecam_client_subsystem_entry(
    PFREERDP_CAMERA_HAL_ENTRY_POINTS pEntryPoints))
{
	UINT ret = CHANNEL_RC_OK;
	WINPR_ASSERT(pEntryPoints);

	CamV4lHal* hal = (CamV4lHal*)calloc(1, sizeof(CamV4lHal));
	if (hal == nullptr)
		return CHANNEL_RC_NO_MEMORY;

	/* ICamHal vtable */
	hal->iHal.Enumerate = cam_v4l_enumerate;
	hal->iHal.GetMediaTypeDescriptions = cam_v4l_get_media_type_descriptions;
	hal->iHal.Activate = cam_v4l_activate;
	hal->iHal.Deactivate = cam_v4l_deactivate;
	hal->iHal.StartStream = cam_v4l_stream_start;
	hal->iHal.StopStream = cam_v4l_stream_stop_by_device_id;
	hal->iHal.Free = cam_v4l_free;

	hal->streams = HashTable_New(FALSE);
	if (!hal->streams)
	{
		ret = CHANNEL_RC_NO_MEMORY;
		goto error;
	}
	/* string keys (deviceId); values freed through cam_v4l_stream_free below */
	HashTable_SetupForStringData(hal->streams, FALSE);

	wObject* obj = HashTable_ValueObject(hal->streams);
	WINPR_ASSERT(obj);
	obj->fnObjectFree = cam_v4l_stream_free;

	if ((ret = pEntryPoints->pRegisterCameraHal(pEntryPoints->plugin, &hal->iHal)))
	{
		WLog_ERR(TAG, "RegisterCameraHal failed with error %" PRIu32 "", ret);
		goto error;
	}
	return ret;

error:
	/* cam_v4l_free releases both the stream map and the HAL itself */
	cam_v4l_free(&hal->iHal);
	return ret;
}

View File

@@ -0,0 +1,54 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, V4L Interface
*
* Copyright 2025 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CAMERA_V4L_H
#define CAMERA_V4L_H

#include <winpr/synch.h>
#include <winpr/wtypes.h>

#include "../camera.h"

/* one mmap'ed V4L2 capture buffer */
typedef struct
{
	void* start;   /* mapping address returned by mmap */
	size_t length; /* mapping length in bytes */
} CamV4lBuffer;

/* per-device capture stream state, cached by deviceId in the HAL */
typedef struct
{
	CRITICAL_SECTION lock; /* serializes capture thread against stop/teardown */

	/* members used to call the callback */
	CameraDevice* dev;
	size_t streamIndex;
	WINPR_ATTR_NODISCARD ICamHalSampleCapturedCallback sampleCallback;

	BOOL streaming; /* cleared to request capture-thread shutdown */
	int fd;         /* V4L2 device descriptor, -1 when closed */

	uint8_t h264UnitId; /* UVC H264 UnitId, if 0 then UVC H264 is not supported */

	size_t nBuffers;
	CamV4lBuffer* buffers; /* array of nBuffers mmap'ed capture buffers */

	HANDLE captureThread;
} CamV4lStream;

#endif /* CAMERA_V4L_H */

View File

@@ -0,0 +1,489 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, UVC H264 support
*
* See USB_Video_Payload_H 264_1 0.pdf for more details
*
* Credits:
* guvcview http://guvcview.sourceforge.net
* Paulo Assis <pj.assis@gmail.com>
*
* Copyright 2025 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <sys/ioctl.h>
#include <linux/uvcvideo.h>
#include <linux/videodev2.h>
#include <libusb.h>
#include "uvc_h264.h"
/* UVC H.264 extension unit GUID: {A29E7641-DE04-47E3-8B2B-F4341AFF003B} */
static uint8_t GUID_UVCX_H264_XU[16] = { 0x41, 0x76, 0x9E, 0xA2, 0x04, 0xDE, 0xE3, 0x47,
0x8B, 0x2B, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B };
#define TAG CHANNELS_TAG("rdpecam-uvch264.client")
/*
 * get length of xu control defined by unit id and selector
 * args:
 *   stream - pointer to video device data
 *   unit - unit id of xu control
 *   selector - selector for control
 *
 * returns: length of xu control, 0 on failure
 */
static uint16_t get_length_xu_control(CamV4lStream* stream, uint8_t unit, uint8_t selector)
{
	WINPR_ASSERT(stream);
	WINPR_ASSERT(stream->fd > 0);

	uint16_t length = 0;
	struct uvc_xu_control_query xu_ctrl_query = WINPR_C_ARRAY_INIT;
	xu_ctrl_query.unit = unit;
	xu_ctrl_query.selector = selector;
	xu_ctrl_query.query = UVC_GET_LEN;
	xu_ctrl_query.size = sizeof(length);
	xu_ctrl_query.data = (uint8_t*)&length;

	if (ioctl(stream->fd, UVCIOC_CTRL_QUERY, &xu_ctrl_query) < 0)
	{
		char ebuffer[256] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "UVCIOC_CTRL_QUERY (GET_LEN) - Error: %s",
		         winpr_strerror(errno, ebuffer, sizeof(ebuffer)));
		return 0;
	}

	return length;
}
/*
 * runs a query on xu control defined by unit id and selector
 * args:
 *   stream - pointer to video device data
 *   unit - unit id of xu control
 *   selector - selector for control
 *   query - query type
 *   data - pointer to query data
 *
 * returns: 0 if query succeeded or error code on fail
 */
static int query_xu_control(CamV4lStream* stream, uint8_t unit, uint8_t selector, uint8_t query,
                            void* data)
{
	/* the control's payload size must be queried first */
	const uint16_t len = get_length_xu_control(stream, unit, selector);

	struct uvc_xu_control_query xu_ctrl_query = WINPR_C_ARRAY_INIT;
	xu_ctrl_query.unit = unit;
	xu_ctrl_query.selector = selector;
	xu_ctrl_query.query = query;
	xu_ctrl_query.size = len;
	xu_ctrl_query.data = (uint8_t*)data;

	/*get query data*/
	const int err = ioctl(stream->fd, UVCIOC_CTRL_QUERY, &xu_ctrl_query);
	if (err < 0)
	{
		char ebuffer[256] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "UVCIOC_CTRL_QUERY (%" PRIu8 ") - Error: %s", query,
		         winpr_strerror(errno, ebuffer, sizeof(ebuffer)));
	}

	return err;
}
/*
 * resets the h264 encoder
 * args:
 *   stream - pointer to video device data
 *
 * returns: 0 on success or error code on fail
 */
static int uvcx_video_encoder_reset(CamV4lStream* stream)
{
	WINPR_ASSERT(stream);

	uvcx_encoder_reset encoder_reset_req = WINPR_C_ARRAY_INIT;

	const int err = query_xu_control(stream, stream->h264UnitId, UVCX_ENCODER_RESET, UVC_SET_CUR,
	                                 &encoder_reset_req);
	if (err < 0)
	{
		char ebuffer[256] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "UVCX_ENCODER_RESET error: %s",
		         winpr_strerror(errno, ebuffer, sizeof(ebuffer)));
	}

	return err;
}
/*
 * probes the h264 encoder config
 * args:
 *   stream - pointer to video device data
 *   query - probe query
 *   uvcx_video_config - pointer to probe/commit config data
 *
 * returns: 0 on success or error code on fail
 */
static int uvcx_video_probe(CamV4lStream* stream, uint8_t query,
                            uvcx_video_config_probe_commit_t* uvcx_video_config)
{
	WINPR_ASSERT(stream);

	const int err = query_xu_control(stream, stream->h264UnitId, UVCX_VIDEO_CONFIG_PROBE, query,
	                                 uvcx_video_config);
	if (err < 0)
	{
		char ebuffer[256] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "UVCX_VIDEO_CONFIG_PROBE error: %s",
		         winpr_strerror(errno, ebuffer, sizeof(ebuffer)));
	}

	return err;
}
/*
 * commits the h264 encoder config
 * args:
 *   stream - pointer to video device data
 *   uvcx_video_config - pointer to probe/commit config data
 *
 * returns: 0 on success or error code on fail
 */
static int uvcx_video_commit(CamV4lStream* stream,
                             uvcx_video_config_probe_commit_t* uvcx_video_config)
{
	WINPR_ASSERT(stream);

	const int err = query_xu_control(stream, stream->h264UnitId, UVCX_VIDEO_CONFIG_COMMIT,
	                                 UVC_SET_CUR, uvcx_video_config);
	if (err < 0)
	{
		char ebuffer[256] = WINPR_C_ARRAY_INIT;
		WLog_ERR(TAG, "UVCX_VIDEO_CONFIG_COMMIT error: %s",
		         winpr_strerror(errno, ebuffer, sizeof(ebuffer)));
	}

	return err;
}
/*
 * sets h264 muxed format (must not be called while streaming)
 *
 * Implements the UVCX probe/commit handshake: reset encoder, fetch driver
 * defaults, override the fields of interest, probe (SET_CUR then GET_CUR),
 * verify the camera accepted the exact resolution/frame interval, commit.
 *
 * args:
 *   stream - pointer to video device data
 *   mediaType
 *
 * returns: TRUE on success or FALSE on fail
 */
BOOL set_h264_muxed_format(CamV4lStream* stream, const CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
	WINPR_ASSERT(stream);
	WINPR_ASSERT(mediaType);

	uvcx_video_config_probe_commit_t config_probe_req = WINPR_C_ARRAY_INIT;

	int err = 0;

	/* reset the encoder */
	err = uvcx_video_encoder_reset(stream);
	if (err != 0)
		return FALSE;

	/* get default values */
	err = uvcx_video_probe(stream, UVC_GET_DEF, &config_probe_req);
	if (err != 0)
		return FALSE;

	/* set resolution */
	config_probe_req.wWidth = WINPR_ASSERTING_INT_CAST(uint16_t, mediaType->Width);
	config_probe_req.wHeight = WINPR_ASSERTING_INT_CAST(uint16_t, mediaType->Height);

	/* set frame rate in 100ns units */
	uint32_t frame_interval =
	    (mediaType->FrameRateDenominator * 1000000000LL / mediaType->FrameRateNumerator) / 100;
	config_probe_req.dwFrameInterval = frame_interval;

	/* quality settings */
	config_probe_req.wProfile = PROFILE_HIGH;
	config_probe_req.bUsageType = USAGETYPE_REALTIME;
	config_probe_req.bRateControlMode = RATECONTROL_VBR;
	config_probe_req.dwBitRate = h264_get_max_bitrate(mediaType->Height);
	config_probe_req.bEntropyCABAC = ENTROPY_CABAC;
	config_probe_req.wIFramePeriod = 1000; /* ms, 1 sec */

	/* hints which parameters are configured */
	config_probe_req.bmHints = BMHINTS_RESOLUTION | BMHINTS_FRAME_INTERVAL | BMHINTS_PROFILE |
	                           BMHINTS_USAGE | BMHINTS_RATECONTROL | BMHINTS_BITRATE |
	                           BMHINTS_ENTROPY | BMHINTS_IFRAMEPERIOD;

	/* set the aux stream */
	config_probe_req.bStreamMuxOption = STREAMMUX_H264;

	/* probe the format */
	err = uvcx_video_probe(stream, UVC_SET_CUR, &config_probe_req);
	if (err != 0)
		return FALSE;

	err = uvcx_video_probe(stream, UVC_GET_CUR, &config_probe_req);
	if (err != 0)
		return FALSE;

	/* the device may silently adjust values it cannot satisfy; reject mismatches */
	if (config_probe_req.wWidth != mediaType->Width)
	{
		WLog_ERR(TAG, "Requested width %" PRIu16 " but got %" PRIu16, mediaType->Width,
		         config_probe_req.wWidth);
		return FALSE;
	}

	if (config_probe_req.wHeight != mediaType->Height)
	{
		WLog_ERR(TAG, "Requested height %" PRIu16 " but got %" PRIu16, mediaType->Height,
		         config_probe_req.wHeight);
		return FALSE;
	}

	if (config_probe_req.dwFrameInterval != frame_interval)
	{
		WLog_ERR(TAG, "Requested frame interval %" PRIu32 " but got %" PRIu32, frame_interval,
		         config_probe_req.dwFrameInterval);
		return FALSE;
	}

	/* commit the format */
	err = uvcx_video_commit(stream, &config_probe_req);
	return (err == 0);
}
/*
 * parses deviceId such as usb-0000:00:1a.0-1.2.2 to return devpath (1.2.2)
 *
 * deviceID format is: usb-<busname>-<devpath>
 * see kernel's usb_make_path()
 *
 * args:
 *   deviceId
 *   path - buffer to return devpath
 *   size - buffer size
 *
 * returns: TRUE if success, FALSE otherwise
 */
static BOOL get_devpath_from_device_id(const char* deviceId, char* path, size_t size)
{
	if (!deviceId || !path || (size == 0))
		return FALSE;

	if (0 != strncmp(deviceId, "usb-", 4))
		return FALSE;

	/* find second `-`, separating <busname> from <devpath> */
	const char* p = strchr(deviceId + 4, '-');
	if (!p)
		return FALSE;
	p++; /* now points to null terminated devpath */

	/* BUGFIX: strncpy does not terminate the destination when the source
	 * fills the buffer; terminate explicitly (truncation is still silent) */
	strncpy(path, p, size - 1);
	path[size - 1] = '\0';
	return TRUE;
}
/*
 * return devpath of a given libusb_device as text string such as: 1.2.2 or 2.3
 *
 * Builds the dotted port chain from libusb_get_port_numbers, writing into the
 * caller's buffer with an overflow check at every step.
 *
 * args:
 *   device
 *   path - buffer to return devpath
 *   size - buffer size
 *
 * returns: TRUE if success, FALSE otherwise
 */
static BOOL get_devpath_from_device(libusb_device* device, char* path, size_t size)
{
	uint8_t ports[MAX_DEVPATH_DEPTH] = WINPR_C_ARRAY_INIT;

	int nPorts = libusb_get_port_numbers(device, ports, sizeof(ports));
	if (nPorts <= 0)
		return FALSE;

	for (int i = 0; i < nPorts; i++)
	{
		/* snprintf guarantees nChars < size on success, so a terminator fits */
		int nChars = snprintf(path, size, "%" PRIu8, ports[i]);
		if ((nChars <= 0) || ((size_t)nChars >= size))
			return FALSE;
		size -= (size_t)nChars;
		path += nChars;

		if (i < nPorts - 1)
		{
			/* overwrite the terminator with the separator; the next snprintf
			 * re-terminates (or fails cleanly once size reaches 0) */
			*path++ = '.';
			size--;
		}
	}
	return TRUE;
}
static uint8_t get_guid_unit_id_from_config_descriptor(struct libusb_config_descriptor* config,
const uint8_t* guid,
const struct libusb_device_descriptor* ddesc)
{
WINPR_ASSERT(config);
WINPR_ASSERT(guid);
WINPR_ASSERT(ddesc);
for (uint8_t j = 0; j < config->bNumInterfaces; j++)
{
const struct libusb_interface* cfg = &config->interface[j];
for (int k = 0; k < cfg->num_altsetting; k++)
{
const struct libusb_interface_descriptor* interface = &cfg->altsetting[k];
if (interface->bInterfaceClass != LIBUSB_CLASS_VIDEO ||
interface->bInterfaceSubClass != USB_VIDEO_CONTROL)
continue;
const uint8_t* ptr = interface->extra;
while (ptr < interface->extra + interface->extra_length)
{
const xu_descriptor* desc = (const xu_descriptor*)ptr;
if (desc->bDescriptorType == USB_VIDEO_CONTROL_INTERFACE &&
desc->bDescriptorSubType == USB_VIDEO_CONTROL_XU_TYPE &&
memcmp(desc->guidExtensionCode, guid, 16) == 0)
{
int8_t unit_id = desc->bUnitID;
WLog_DBG(TAG,
"For camera %04" PRIx16 ":%04" PRIx16
" found UVCX H264 UnitID %" PRId8,
ddesc->idVendor, ddesc->idProduct, unit_id);
if (unit_id < 0)
return 0;
return WINPR_CXX_COMPAT_CAST(uint8_t, unit_id);
}
ptr += desc->bLength;
}
}
}
return 0;
}
/*
 * get GUID unit id from libusb_device, if any
 *
 * args:
 *   device
 *   guid - 16 byte xu GUID
 *
 * returns: unit id for the matching GUID or 0 if none
 */
static uint8_t get_guid_unit_id_from_device(libusb_device* device, const uint8_t* guid)
{
	struct libusb_device_descriptor ddesc = WINPR_C_ARRAY_INIT;

	if (libusb_get_device_descriptor(device, &ddesc) != 0)
	{
		WLog_ERR(TAG, "Couldn't get device descriptor");
		return 0;
	}

	/* check every configuration until one yields a matching unit id */
	for (uint8_t cfgIndex = 0; cfgIndex < ddesc.bNumConfigurations; ++cfgIndex)
	{
		struct libusb_config_descriptor* config = nullptr;
		uint8_t unitId = 0;

		if (libusb_get_config_descriptor(device, cfgIndex, &config) != 0)
		{
			WLog_ERR(TAG,
			         "Couldn't get config descriptor for "
			         "configuration %" PRIu8,
			         cfgIndex);
		}
		else
			unitId = get_guid_unit_id_from_config_descriptor(config, guid, &ddesc);

		libusb_free_config_descriptor(config);

		if (unitId != 0)
			return unitId;
	}

	/* no match found */
	return 0;
}
/*
 * get GUID unit id, if any
 *
 * args:
 *   deviceId - camera deviceId such as: usb-0000:00:1a.0-1.2.2
 *   guid - 16 byte xu GUID
 *
 * returns: unit id for the matching GUID or 0 if none
 */
static uint8_t get_guid_unit_id(const char* deviceId, const uint8_t* guid)
{
	char cam_devpath[MAX_DEVPATH_STR_SIZE] = WINPR_C_ARRAY_INIT;

	if (!get_devpath_from_device_id(deviceId, cam_devpath, sizeof(cam_devpath)))
	{
		WLog_ERR(TAG, "Unable to get devpath from deviceId %s", deviceId);
		return 0;
	}

	libusb_context* usb_ctx = nullptr;
	if (0 != libusb_init(&usb_ctx))
	{
		WLog_ERR(TAG, "Unable to initialize libusb");
		return 0;
	}

	uint8_t unit_id = 0;
	libusb_device** device_list = nullptr;
	const ssize_t count = libusb_get_device_list(usb_ctx, &device_list);

	/* scan all USB devices for one with a matching devpath; more than one
	 * candidate may match, so keep going until a unit id is found */
	for (ssize_t idx = 0; (idx < count) && (unit_id == 0); idx++)
	{
		char devpath[MAX_DEVPATH_STR_SIZE] = WINPR_C_ARRAY_INIT;
		libusb_device* candidate = device_list[idx];

		if (!candidate || !get_devpath_from_device(candidate, devpath, sizeof(devpath)))
			continue;
		if (strcmp(cam_devpath, devpath) != 0)
			continue;

		/* devpath matches - try to get the guid unit id from this device */
		unit_id = get_guid_unit_id_from_device(candidate, guid);
	}

	libusb_free_device_list(device_list, TRUE);
	libusb_exit(usb_ctx);
	return unit_id;
}
/*
 * gets the uvc h264 xu control unit id, if any
 *
 * NOTE(review): the public name says "h624" but presumably means "h264";
 * it is declared like this in the header, so renaming would break callers.
 *
 * args:
 *   deviceId - camera deviceId such as: usb-0000:00:1a.0-1.2.2
 *
 * returns: unit id or 0 if none
 */
uint8_t get_uvc_h624_unit_id(const char* deviceId)
{
	WINPR_ASSERT(deviceId);

	WLog_DBG(TAG, "Checking for UVCX H264 UnitID for %s", deviceId);
	const uint8_t unitId = get_guid_unit_id(deviceId, GUID_UVCX_H264_XU);
	return unitId;
}

View File

@@ -0,0 +1,173 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, UVC H264 support
*
 * See USB_Video_Payload_H.264_1.0.pdf for more details
*
* Credits:
* guvcview http://guvcview.sourceforge.net
* Paulo Assis <pj.assis@gmail.com>
*
* Copyright 2025 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef UVC_H264_H
#define UVC_H264_H
#include <winpr/wtypes.h>
#include "../camera.h"
#include "camera_v4l.h"
/* UVC H.264 control selectors */
#define UVCX_VIDEO_CONFIG_PROBE 0x01
#define UVCX_VIDEO_CONFIG_COMMIT 0x02
#define UVCX_RATE_CONTROL_MODE 0x03
#define UVCX_TEMPORAL_SCALE_MODE 0x04
#define UVCX_SPATIAL_SCALE_MODE 0x05
#define UVCX_SNR_SCALE_MODE 0x06
#define UVCX_LTR_BUFFER_SIZE_CONTROL 0x07
#define UVCX_LTR_PICTURE_CONTROL 0x08
#define UVCX_PICTURE_TYPE_CONTROL 0x09
#define UVCX_VERSION 0x0A
#define UVCX_ENCODER_RESET 0x0B
#define UVCX_FRAMERATE_CONFIG 0x0C
#define UVCX_VIDEO_ADVANCE_CONFIG 0x0D
#define UVCX_BITRATE_LAYERS 0x0E
#define UVCX_QP_STEPS_LAYERS 0x0F
/* Video Class-Specific Request Codes */
#define UVC_RC_UNDEFINED 0x00
#define UVC_SET_CUR 0x01
#define UVC_GET_CUR 0x81
#define UVC_GET_MIN 0x82
#define UVC_GET_MAX 0x83
#define UVC_GET_RES 0x84
#define UVC_GET_LEN 0x85
#define UVC_GET_INFO 0x86
#define UVC_GET_DEF 0x87
/* bStreamMuxOption defines
 * NOTE: must be fully parenthesized - an unparenthesized `(1 << 0) | (1 << 1)`
 * mis-binds under operator precedence in expressions like `x & STREAMMUX_H264` */
#define STREAMMUX_H264 ((1 << 0) | (1 << 1))
/* wProfile defines */
#define PROFILE_BASELINE 0x4200
#define PROFILE_MAIN 0x4D00
#define PROFILE_HIGH 0x6400
/* bUsageType defines */
#define USAGETYPE_REALTIME 0x01
/* bRateControlMode defines */
#define RATECONTROL_CBR 0x01
#define RATECONTROL_VBR 0x02
#define RATECONTROL_CONST_QP 0x03
/* bEntropyCABAC defines */
#define ENTROPY_CABAC 0x01
/* bmHints defines */
#define BMHINTS_RESOLUTION 0x0001
#define BMHINTS_PROFILE 0x0002
#define BMHINTS_RATECONTROL 0x0004
#define BMHINTS_USAGE 0x0008
#define BMHINTS_SLICEMODE 0x0010
#define BMHINTS_SLICEUNITS 0x0020
#define BMHINTS_MVCVIEW 0x0040
#define BMHINTS_TEMPORAL 0x0080
#define BMHINTS_SNR 0x0100
#define BMHINTS_SPATIAL 0x0200
#define BMHINTS_SPATIAL_RATIO 0x0400
#define BMHINTS_FRAME_INTERVAL 0x0800
#define BMHINTS_LEAKY_BKT_SIZE 0x1000
#define BMHINTS_BITRATE 0x2000
#define BMHINTS_ENTROPY 0x4000
#define BMHINTS_IFRAMEPERIOD 0x8000
/* USB related defines */
#define USB_VIDEO_CONTROL 0x01
#define USB_VIDEO_CONTROL_INTERFACE 0x24
#define USB_VIDEO_CONTROL_XU_TYPE 0x06
#define MAX_DEVPATH_DEPTH 8
#define MAX_DEVPATH_STR_SIZE 32
#define WINPR_PACK_PUSH
#include <winpr/pack.h>
/* h264 probe commit struct (uvc 1.1) - packed
 *
 * Wire-format payload for the UVCX_VIDEO_CONFIG_PROBE /
 * UVCX_VIDEO_CONFIG_COMMIT extension unit controls. Field order and
 * packing ARE the protocol - do not reorder or pad.
 */
typedef struct
{
	uint32_t dwFrameInterval;  /* frame interval (presumably 100 ns units per UVC - confirm against spec) */
	uint32_t dwBitRate;
	uint16_t bmHints;          /* bitmask of BMHINTS_* flags: which fields below are configured */
	uint16_t wConfigurationIndex;
	uint16_t wWidth;           /* requested/negotiated frame width in pixels */
	uint16_t wHeight;          /* requested/negotiated frame height in pixels */
	uint16_t wSliceUnits;
	uint16_t wSliceMode;
	uint16_t wProfile;         /* PROFILE_* value (e.g. PROFILE_BASELINE) */
	uint16_t wIFramePeriod;    /* I-frame period in milliseconds */
	uint16_t wEstimatedVideoDelay;
	uint16_t wEstimatedMaxConfigDelay;
	uint8_t bUsageType;        /* USAGETYPE_* value */
	uint8_t bRateControlMode;  /* RATECONTROL_* value */
	uint8_t bTemporalScaleMode;
	uint8_t bSpatialScaleMode;
	uint8_t bSNRScaleMode;
	uint8_t bStreamMuxOption;  /* STREAMMUX_* bits: aux stream multiplexing selection */
	uint8_t bStreamFormat;
	uint8_t bEntropyCABAC;     /* ENTROPY_CABAC to select CABAC entropy coding */
	uint8_t bTimestamp;
	uint8_t bNumOfReorderFrames;
	uint8_t bPreviewFlipped;
	uint8_t bView;
	uint8_t bReserved1;
	uint8_t bReserved2;
	uint8_t bStreamID;
	uint8_t bSpatialLayerRatio;
	uint16_t wLeakyBucketSize;
} uvcx_video_config_probe_commit_t;
/* encoder reset struct - packed
 *
 * Payload for the UVCX_ENCODER_RESET control: identifies which
 * layer's encoder to reset.
 */
typedef struct
{
	uint16_t wLayerID; /* target layer id */
} uvcx_encoder_reset;
/* xu_descriptor struct - packed
 *
 * Header of a class-specific extension-unit (XU) descriptor as it
 * appears in the interface's `extra` bytes.
 *
 * NOTE(review): the first four fields are signed int8_t although USB
 * descriptor fields are typically unsigned; callers rely on this to
 * reject bUnitID values < 0 - confirm before changing.
 */
typedef struct
{
	int8_t bLength;             /* total descriptor length in bytes; advances the descriptor walk */
	int8_t bDescriptorType;     /* expected: USB_VIDEO_CONTROL_INTERFACE (0x24) */
	int8_t bDescriptorSubType;  /* expected: USB_VIDEO_CONTROL_XU_TYPE (0x06) */
	int8_t bUnitID;             /* unit id reported for a matching GUID */
	uint8_t guidExtensionCode[16]; /* 16-byte GUID identifying the extension unit */
} xu_descriptor;
#define WINPR_PACK_POP
#include <winpr/pack.h>
/* returns the UVCX H264 extension-unit id for the camera identified by
 * deviceId (e.g. usb-0000:00:1a.0-1.2.2), or 0 if none.
 * NOTE(review): "h624" in the name is presumably a typo for "h264" -
 * renaming is an API break, so it is kept as-is. */
WINPR_ATTR_NODISCARD
FREERDP_LOCAL uint8_t get_uvc_h624_unit_id(const char* deviceId);

/* probes and commits the muxed H264 format on the given V4L stream for
 * the requested media type; returns TRUE on success */
WINPR_ATTR_NODISCARD
FREERDP_LOCAL BOOL set_h264_muxed_format(CamV4lStream* stream,
                                         const CAM_MEDIA_TYPE_DESCRIPTION* mediaType);

#endif /* UVC_H264_H */