Merge pull request #8636 from german77/irs_cluster_release

service: irs: Implement clustering processor
Commit 64fd9f41a7 by liamwhite, 2022-07-27 15:54:28 -04:00, committed via GitHub
6 changed files with 323 additions and 7 deletions


@@ -451,6 +451,7 @@ add_library(core STATIC
hle/service/hid/hidbus.h
hle/service/hid/irs.cpp
hle/service/hid/irs.h
hle/service/hid/irs_ring_lifo.h
hle/service/hid/ring_lifo.h
hle/service/hid/xcd.cpp
hle/service/hid/xcd.h


@@ -166,7 +166,7 @@ void IRS::RunClusteringProcessor(Kernel::HLERequestContext& ctx) {
if (result.IsSuccess()) {
auto& device = GetIrCameraSharedMemoryDeviceEntry(parameters.camera_handle);
MakeProcessor<ClusteringProcessor>(parameters.camera_handle, device);
MakeProcessorWithCoreContext<ClusteringProcessor>(parameters.camera_handle, device);
auto& image_transfer_processor =
GetProcessor<ClusteringProcessor>(parameters.camera_handle);
image_transfer_processor.SetConfig(parameters.processor_config);


@@ -0,0 +1,47 @@
// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
#pragma once
#include <array>
#include "common/common_types.h"
namespace Service::IRS {
template <typename State, std::size_t max_buffer_size>
struct Lifo {
s64 sampling_number{};
s64 buffer_count{};
std::array<State, max_buffer_size> entries{};
const State& ReadCurrentEntry() const {
return entries[GetBufferTail()];
}
const State& ReadPreviousEntry() const {
return entries[GetPreviousEntryIndex()];
}
s64 GetBufferTail() const {
return sampling_number % max_buffer_size;
}
std::size_t GetPreviousEntryIndex() const {
return static_cast<size_t>((GetBufferTail() + max_buffer_size - 1) % max_buffer_size);
}
std::size_t GetNextEntryIndex() const {
return static_cast<size_t>((GetBufferTail() + 1) % max_buffer_size);
}
void WriteNextEntry(const State& new_state) {
if (buffer_count < static_cast<s64>(max_buffer_size)) {
buffer_count++;
}
sampling_number++;
entries[GetBufferTail()] = new_state;
}
};
} // namespace Service::IRS
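Lifo here is a fixed-size ring buffer indexed by sampling_number: WriteNextEntry bumps the counter first and then stores into the slot the new counter selects, so GetBufferTail always names the most recently written entry and ReadCurrentEntry returns the newest sample. A minimal standalone sketch of those semantics (MiniLifo, Sample and main are illustrative names, not part of the commit; buffer_count is omitted):

```cpp
// Standalone sketch of the ring-buffer semantics above, using std types instead of yuzu's s64.
#include <array>
#include <cstdint>
#include <cstdio>

struct Sample {
    std::int64_t sampling_number{};
    int value{};
};

template <typename State, std::size_t max_buffer_size>
struct MiniLifo {
    std::int64_t sampling_number{};
    std::array<State, max_buffer_size> entries{};

    std::size_t GetBufferTail() const {
        return static_cast<std::size_t>(sampling_number % max_buffer_size);
    }
    const State& ReadCurrentEntry() const {
        return entries[GetBufferTail()];
    }
    void WriteNextEntry(const State& new_state) {
        // The counter advances first, so the tail always names the slot just written.
        sampling_number++;
        entries[GetBufferTail()] = new_state;
    }
};

int main() {
    MiniLifo<Sample, 6> lifo{};
    for (int i = 1; i <= 8; ++i) {
        lifo.WriteNextEntry({.sampling_number = i, .value = i * 10});
    }
    // Eight writes into a six-entry ring overwrite the two oldest slots in place;
    // the current entry is still the most recent sample.
    std::printf("current value = %d\n", lifo.ReadCurrentEntry().value); // prints 80
}
```

Entries are never cleared: once the ring is full the oldest slots are simply overwritten, which is why buffer_count in the real struct saturates at max_buffer_size.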


@@ -1,34 +1,265 @@
// SPDX-FileCopyrightText: Copyright 2022 yuzu Emulator Project
// SPDX-License-Identifier: GPL-3.0-or-later
#include <queue>
#include "core/hid/emulated_controller.h"
#include "core/hid/hid_core.h"
#include "core/hle/service/hid/irsensor/clustering_processor.h"
namespace Service::IRS {
ClusteringProcessor::ClusteringProcessor(Core::IrSensor::DeviceFormat& device_format)
: device(device_format) {
ClusteringProcessor::ClusteringProcessor(Core::HID::HIDCore& hid_core_,
Core::IrSensor::DeviceFormat& device_format,
std::size_t npad_index)
: device{device_format} {
npad_device = hid_core_.GetEmulatedControllerByIndex(npad_index);
device.mode = Core::IrSensor::IrSensorMode::ClusteringProcessor;
device.camera_status = Core::IrSensor::IrCameraStatus::Unconnected;
device.camera_internal_status = Core::IrSensor::IrCameraInternalStatus::Stopped;
SetDefaultConfig();
shared_memory = std::construct_at(
reinterpret_cast<ClusteringSharedMemory*>(&device_format.state.processor_raw_data));
Core::HID::ControllerUpdateCallback engine_callback{
.on_change = [this](Core::HID::ControllerTriggerType type) { OnControllerUpdate(type); },
.is_npad_service = true,
};
callback_key = npad_device->SetCallback(engine_callback);
}
ClusteringProcessor::~ClusteringProcessor() = default;
ClusteringProcessor::~ClusteringProcessor() {
npad_device->DeleteCallback(callback_key);
};
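The constructor/destructor changes tie the processor's lifetime to its controller subscription: SetCallback returns a key that the destructor later passes to DeleteCallback, so only this processor's registration is removed. A standalone sketch of that subscribe/unsubscribe pattern, using illustrative names (Emitter, Listener) rather than yuzu's actual HID API:

```cpp
#include <cstdio>
#include <functional>
#include <map>

class Emitter {
public:
    int SetCallback(std::function<void(int)> callback) {
        const int key = next_key++;
        callbacks.emplace(key, std::move(callback));
        return key; // the caller keeps this to unregister later
    }
    void DeleteCallback(int key) {
        callbacks.erase(key);
    }
    void Trigger(int event) {
        for (auto& [key, callback] : callbacks) {
            callback(event);
        }
    }

private:
    int next_key = 0;
    std::map<int, std::function<void(int)>> callbacks;
};

class Listener {
public:
    explicit Listener(Emitter& emitter_) : emitter{emitter_} {
        callback_key = emitter.SetCallback([this](int event) { OnUpdate(event); });
    }
    ~Listener() {
        emitter.DeleteCallback(callback_key); // unregister so the emitter never calls a dead object
    }

private:
    void OnUpdate(int event) {
        std::printf("event %d\n", event);
    }

    Emitter& emitter;
    int callback_key{};
};

int main() {
    Emitter emitter;
    {
        Listener listener{emitter};
        emitter.Trigger(1); // prints "event 1"
    }
    emitter.Trigger(2); // listener already unregistered, nothing printed
}
```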
void ClusteringProcessor::StartProcessor() {}
void ClusteringProcessor::StartProcessor() {
device.camera_status = Core::IrSensor::IrCameraStatus::Available;
device.camera_internal_status = Core::IrSensor::IrCameraInternalStatus::Ready;
}
void ClusteringProcessor::SuspendProcessor() {}
void ClusteringProcessor::StopProcessor() {}
void ClusteringProcessor::OnControllerUpdate(Core::HID::ControllerTriggerType type) {
if (type != Core::HID::ControllerTriggerType::IrSensor) {
return;
}
next_state = {};
const auto camera_data = npad_device->GetCamera();
auto filtered_image = camera_data.data;
RemoveLowIntensityData(filtered_image);
const auto window_start_x = static_cast<std::size_t>(current_config.window_of_interest.x);
const auto window_start_y = static_cast<std::size_t>(current_config.window_of_interest.y);
const auto window_end_x =
window_start_x + static_cast<std::size_t>(current_config.window_of_interest.width);
const auto window_end_y =
window_start_y + static_cast<std::size_t>(current_config.window_of_interest.height);
for (std::size_t y = window_start_y; y < window_end_y; y++) {
for (std::size_t x = window_start_x; x < window_end_x; x++) {
u8 pixel = GetPixel(filtered_image, x, y);
if (pixel == 0) {
continue;
}
const auto cluster = GetClusterProperties(filtered_image, x, y);
if (cluster.pixel_count > current_config.pixel_count_max) {
continue;
}
if (cluster.pixel_count < current_config.pixel_count_min) {
continue;
}
// Cluster object limit reached
if (next_state.object_count >= next_state.data.size()) {
continue;
}
next_state.data[next_state.object_count] = cluster;
next_state.object_count++;
}
}
next_state.sampling_number = camera_data.sample;
next_state.timestamp = next_state.timestamp + 131;
next_state.ambient_noise_level = Core::IrSensor::CameraAmbientNoiseLevel::Low;
shared_memory->clustering_lifo.WriteNextEntry(next_state);
if (!IsProcessorActive()) {
StartProcessor();
}
}
void ClusteringProcessor::RemoveLowIntensityData(std::vector<u8>& data) {
for (u8& pixel : data) {
if (pixel < current_config.pixel_count_min) {
pixel = 0;
}
}
}
ClusteringProcessor::ClusteringData ClusteringProcessor::GetClusterProperties(std::vector<u8>& data,
std::size_t x,
std::size_t y) {
using DataPoint = Common::Point<std::size_t>;
std::queue<DataPoint> search_points{};
ClusteringData current_cluster = GetPixelProperties(data, x, y);
SetPixel(data, x, y, 0);
search_points.emplace<DataPoint>({x, y});
while (!search_points.empty()) {
const auto point = search_points.front();
search_points.pop();
// Avoid negative numbers
if (point.x == 0 || point.y == 0) {
continue;
}
std::array<DataPoint, 4> new_points{
DataPoint{point.x - 1, point.y},
{point.x, point.y - 1},
{point.x + 1, point.y},
{point.x, point.y + 1},
};
for (const auto new_point : new_points) {
if (new_point.x >= width) {
continue;
}
if (new_point.y >= height) {
continue;
}
if (GetPixel(data, new_point.x, new_point.y) < current_config.object_intensity_min) {
continue;
}
const ClusteringData cluster = GetPixelProperties(data, new_point.x, new_point.y);
current_cluster = MergeCluster(current_cluster, cluster);
SetPixel(data, new_point.x, new_point.y, 0);
search_points.emplace<DataPoint>({new_point.x, new_point.y});
}
}
return current_cluster;
}
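GetClusterProperties grows one cluster with a breadth-first flood fill: starting from a lit pixel it zeroes every pixel it consumes so nothing is counted twice, and it only follows 4-connected neighbours that reach object_intensity_min. Combined with the window-of-interest scan and the pixel-count filters in OnControllerUpdate, the whole pass looks roughly like the standalone sketch below (illustrative names; the low-intensity filter is folded into the fill, and only pixel count, centroid and average intensity are tracked since the bounding box follows the same weighted pattern):

```cpp
#include <cstdint>
#include <cstdio>
#include <queue>
#include <utility>
#include <vector>

struct Cluster {
    float average_intensity{};
    float centroid_x{}, centroid_y{};
    std::uint32_t pixel_count{};
};

constexpr std::size_t kWidth = 8;
constexpr std::size_t kHeight = 6;

std::uint8_t GetPixel(const std::vector<std::uint8_t>& img, std::size_t x, std::size_t y) {
    const std::size_t index = y * kWidth + x;
    return index < img.size() ? img[index] : 0;
}

Cluster FloodFill(std::vector<std::uint8_t>& img, std::size_t x, std::size_t y,
                  std::uint8_t intensity_min) {
    Cluster cluster{};
    std::queue<std::pair<std::size_t, std::size_t>> frontier;
    frontier.emplace(x, y);
    float intensity_sum = 0.0f, x_sum = 0.0f, y_sum = 0.0f;
    while (!frontier.empty()) {
        const auto [px, py] = frontier.front();
        frontier.pop();
        const std::uint8_t value = GetPixel(img, px, py);
        if (value < intensity_min) {
            continue; // already consumed or too dim
        }
        img[py * kWidth + px] = 0; // consume the pixel so it is counted exactly once
        intensity_sum += value / 255.0f;
        x_sum += static_cast<float>(px);
        y_sum += static_cast<float>(py);
        cluster.pixel_count++;
        // 4-connected neighbours; the guards keep unsigned coordinates in range
        if (px > 0) frontier.emplace(px - 1, py);
        if (py > 0) frontier.emplace(px, py - 1);
        if (px + 1 < kWidth) frontier.emplace(px + 1, py);
        if (py + 1 < kHeight) frontier.emplace(px, py + 1);
    }
    cluster.average_intensity = intensity_sum / static_cast<float>(cluster.pixel_count);
    cluster.centroid_x = x_sum / static_cast<float>(cluster.pixel_count);
    cluster.centroid_y = y_sum / static_cast<float>(cluster.pixel_count);
    return cluster;
}

int main() {
    // Two bright blobs on a dark background.
    std::vector<std::uint8_t> image(kWidth * kHeight, 10);
    for (std::size_t y = 1; y <= 2; ++y)
        for (std::size_t x = 1; x <= 2; ++x) image[y * kWidth + x] = 200; // 2x2 blob
    image[4 * kWidth + 6] = 220;                                          // single bright pixel
    constexpr std::uint8_t intensity_min = 150;
    constexpr std::uint32_t pixel_count_min = 1, pixel_count_max = 100;

    std::vector<Cluster> clusters;
    for (std::size_t y = 0; y < kHeight; ++y) {
        for (std::size_t x = 0; x < kWidth; ++x) {
            if (GetPixel(image, x, y) < intensity_min) continue;
            const Cluster c = FloodFill(image, x, y, intensity_min);
            if (c.pixel_count < pixel_count_min || c.pixel_count > pixel_count_max) continue;
            clusters.push_back(c);
        }
    }
    for (const Cluster& c : clusters) {
        std::printf("cluster: %u px, centroid (%.1f, %.1f), intensity %.2f\n",
                    c.pixel_count, c.centroid_x, c.centroid_y, c.average_intensity);
    }
}
```

With the sample image this reports one 4-pixel cluster centred at (1.5, 1.5) and one 1-pixel cluster at (6.0, 4.0).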
ClusteringProcessor::ClusteringData ClusteringProcessor::GetPixelProperties(
const std::vector<u8>& data, std::size_t x, std::size_t y) const {
return {
.average_intensity = GetPixel(data, x, y) / 255.0f,
.centroid =
{
.x = static_cast<f32>(x),
.y = static_cast<f32>(y),
},
.pixel_count = 1,
.bound =
{
.x = static_cast<s16>(x),
.y = static_cast<s16>(y),
.width = 1,
.height = 1,
},
};
}
ClusteringProcessor::ClusteringData ClusteringProcessor::MergeCluster(
const ClusteringData a, const ClusteringData b) const {
const f32 a_pixel_count = static_cast<f32>(a.pixel_count);
const f32 b_pixel_count = static_cast<f32>(b.pixel_count);
const f32 pixel_count = a_pixel_count + b_pixel_count;
const f32 average_intensity =
(a.average_intensity * a_pixel_count + b.average_intensity * b_pixel_count) / pixel_count;
const Core::IrSensor::IrsCentroid centroid = {
.x = (a.centroid.x * a_pixel_count + b.centroid.x * b_pixel_count) / pixel_count,
.y = (a.centroid.y * a_pixel_count + b.centroid.y * b_pixel_count) / pixel_count,
};
s16 bound_start_x = a.bound.x < b.bound.x ? a.bound.x : b.bound.x;
s16 bound_start_y = a.bound.y < b.bound.y ? a.bound.y : b.bound.y;
s16 a_bound_end_x = a.bound.x + a.bound.width;
s16 a_bound_end_y = a.bound.y + a.bound.height;
s16 b_bound_end_x = b.bound.x + b.bound.width;
s16 b_bound_end_y = b.bound.y + b.bound.height;
const Core::IrSensor::IrsRect bound = {
.x = bound_start_x,
.y = bound_start_y,
.width = a_bound_end_x > b_bound_end_x ? static_cast<s16>(a_bound_end_x - bound_start_x)
: static_cast<s16>(b_bound_end_x - bound_start_x),
.height = a_bound_end_y > b_bound_end_y ? static_cast<s16>(a_bound_end_y - bound_start_y)
: static_cast<s16>(b_bound_end_y - bound_start_y),
};
return {
.average_intensity = average_intensity,
.centroid = centroid,
.pixel_count = static_cast<u32>(pixel_count),
.bound = bound,
};
}
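MergeCluster combines two clusters by weighting the centroid and the average intensity by pixel count and by taking the union of the two bounding rectangles. A short standalone example with made-up values:

```cpp
#include <cstdio>

int main() {
    // Cluster A: 3 pixels, centroid (2.0, 1.0), intensity 0.6, bound x=1, y=0, w=3, h=2
    // Cluster B: 1 pixel,  centroid (5.0, 2.0), intensity 0.9, bound x=5, y=2, w=1, h=1
    const float pixel_count = 3.0f + 1.0f;
    const float intensity = (0.6f * 3.0f + 0.9f * 1.0f) / pixel_count; // 0.675
    const float cx = (2.0f * 3.0f + 5.0f * 1.0f) / pixel_count;        // 2.75
    const float cy = (1.0f * 3.0f + 2.0f * 1.0f) / pixel_count;        // 1.25
    // Union of bounds: start at the smaller corner, end at the larger corner.
    const int x = 1, y = 0;        // min of (1, 5) and min of (0, 2)
    const int width = (5 + 1) - x; // larger end x (6) minus start x -> 5
    const int height = (2 + 1) - y; // larger end y (3) minus start y -> 3
    std::printf("merged: %d px, centroid (%.2f, %.2f), intensity %.3f, bound (%d, %d, %dx%d)\n",
                static_cast<int>(pixel_count), cx, cy, intensity, x, y, width, height);
}
```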
u8 ClusteringProcessor::GetPixel(const std::vector<u8>& data, std::size_t x, std::size_t y) const {
if ((y * width) + x > data.size()) {
return 0;
}
return data[(y * width) + x];
}
void ClusteringProcessor::SetPixel(std::vector<u8>& data, std::size_t x, std::size_t y, u8 value) {
if ((y * width) + x > data.size()) {
return;
}
data[(y * width) + x] = value;
}
void ClusteringProcessor::SetDefaultConfig() {
using namespace std::literals::chrono_literals;
current_config.camera_config.exposure_time = std::chrono::microseconds(200ms).count();
current_config.camera_config.gain = 2;
current_config.camera_config.is_negative_used = false;
current_config.camera_config.light_target = Core::IrSensor::CameraLightTarget::BrightLeds;
current_config.window_of_interest = {
.x = 0,
.y = 0,
.width = width,
.height = height,
};
current_config.pixel_count_min = 3;
current_config.pixel_count_max = static_cast<u32>(GetDataSize(format));
current_config.is_external_light_filter_enabled = true;
current_config.object_intensity_min = 150;
npad_device->SetCameraFormat(format);
}
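SetDefaultConfig builds the exposure default from a 200ms literal: std::chrono::microseconds(200ms) converts losslessly, so exposure_time ends up as 200000 microseconds. A quick standalone check of that idiom:

```cpp
#include <chrono>
#include <cstdio>

int main() {
    using namespace std::literals::chrono_literals;
    // Milliseconds convert to microseconds without loss, so this is a compile-time constant.
    constexpr auto exposure = std::chrono::microseconds(200ms).count();
    static_assert(exposure == 200'000);
    std::printf("exposure_time = %lld us\n", static_cast<long long>(exposure));
}
```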
void ClusteringProcessor::SetConfig(Core::IrSensor::PackedClusteringProcessorConfig config) {
current_config.camera_config.exposure_time = config.camera_config.exposure_time;
current_config.camera_config.gain = config.camera_config.gain;
current_config.camera_config.is_negative_used = config.camera_config.is_negative_used;
current_config.camera_config.light_target =
static_cast<Core::IrSensor::CameraLightTarget>(config.camera_config.light_target);
current_config.window_of_interest = config.window_of_interest;
current_config.pixel_count_min = config.pixel_count_min;
current_config.pixel_count_max = config.pixel_count_max;
current_config.is_external_light_filter_enabled = config.is_external_light_filter_enabled;
current_config.object_intensity_min = config.object_intensity_min;
LOG_INFO(Service_IRS,
"Processor config, exposure_time={}, gain={}, is_negative_used={}, "
"light_target={}, window_of_interest=({}, {}, {}, {}), pixel_count_min={}, "
"pixel_count_max={}, is_external_light_filter_enabled={}, object_intensity_min={}",
current_config.camera_config.exposure_time, current_config.camera_config.gain,
current_config.camera_config.is_negative_used,
current_config.camera_config.light_target, current_config.window_of_interest.x,
current_config.window_of_interest.y, current_config.window_of_interest.width,
current_config.window_of_interest.height, current_config.pixel_count_min,
current_config.pixel_count_max, current_config.is_external_light_filter_enabled,
current_config.object_intensity_min);
npad_device->SetCameraFormat(format);
}
} // namespace Service::IRS


@@ -5,12 +5,19 @@
#include "common/common_types.h"
#include "core/hid/irs_types.h"
#include "core/hle/service/hid/irs_ring_lifo.h"
#include "core/hle/service/hid/irsensor/processor_base.h"
namespace Core::HID {
class EmulatedController;
} // namespace Core::HID
namespace Service::IRS {
class ClusteringProcessor final : public ProcessorBase {
public:
explicit ClusteringProcessor(Core::IrSensor::DeviceFormat& device_format);
explicit ClusteringProcessor(Core::HID::HIDCore& hid_core_,
Core::IrSensor::DeviceFormat& device_format,
std::size_t npad_index);
~ClusteringProcessor() override;
// Called when the processor is initialized
@@ -26,6 +33,10 @@ public:
void SetConfig(Core::IrSensor::PackedClusteringProcessorConfig config);
private:
static constexpr auto format = Core::IrSensor::ImageTransferProcessorFormat::Size320x240;
static constexpr std::size_t width = 320;
static constexpr std::size_t height = 240;
// This is nn::irsensor::ClusteringProcessorConfig
struct ClusteringProcessorConfig {
Core::IrSensor::CameraConfig camera_config;
@@ -68,7 +79,32 @@ private:
static_assert(sizeof(ClusteringProcessorState) == 0x198,
"ClusteringProcessorState is an invalid size");
struct ClusteringSharedMemory {
Service::IRS::Lifo<ClusteringProcessorState, 6> clustering_lifo;
static_assert(sizeof(clustering_lifo) == 0x9A0, "clustering_lifo is an invalid size");
INSERT_PADDING_WORDS(0x11F);
};
static_assert(sizeof(ClusteringSharedMemory) == 0xE20,
"ClusteringSharedMemory is an invalid size");
void OnControllerUpdate(Core::HID::ControllerTriggerType type);
void RemoveLowIntensityData(std::vector<u8>& data);
ClusteringData GetClusterProperties(std::vector<u8>& data, std::size_t x, std::size_t y);
ClusteringData GetPixelProperties(const std::vector<u8>& data, std::size_t x,
std::size_t y) const;
ClusteringData MergeCluster(const ClusteringData a, const ClusteringData b) const;
u8 GetPixel(const std::vector<u8>& data, std::size_t x, std::size_t y) const;
void SetPixel(std::vector<u8>& data, std::size_t x, std::size_t y, u8 value);
// Sets config parameters of the camera
void SetDefaultConfig();
ClusteringSharedMemory* shared_memory = nullptr;
ClusteringProcessorState next_state{};
ClusteringProcessorConfig current_config{};
Core::IrSensor::DeviceFormat& device;
Core::HID::EmulatedController* npad_device;
int callback_key{};
};
} // namespace Service::IRS
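The two size asserts in ClusteringSharedMemory follow directly from the declared layout: the Lifo header is two s64 fields (0x10 bytes), the six 0x198-byte states add 0x990 for a 0x9A0 Lifo, and INSERT_PADDING_WORDS(0x11F) appends 0x47C bytes of u32 padding; the compiler then rounds the struct up to its 8-byte alignment, which is where the final 0xE20 comes from. A standalone arithmetic check (plain constants, not the actual yuzu types):

```cpp
#include <cstddef>
#include <cstdint>

constexpr std::size_t lifo_header = 2 * sizeof(std::int64_t); // sampling_number + buffer_count = 0x10
constexpr std::size_t lifo_entries = 6 * 0x198;               // six ClusteringProcessorState entries = 0x990
constexpr std::size_t lifo_size = lifo_header + lifo_entries;
constexpr std::size_t padding_bytes = 0x11F * 4;              // INSERT_PADDING_WORDS counts 32-bit words
// The struct holds s64 fields, so its size is rounded up to 8-byte alignment.
constexpr std::size_t shared_memory_size = (lifo_size + padding_bytes + 7) & ~std::size_t{7};

static_assert(lifo_size == 0x9A0);                 // matches the clustering_lifo assert
static_assert(lifo_size + padding_bytes == 0xE1C); // 4 bytes short of the final size...
static_assert(shared_memory_size == 0xE20);        // ...made up by tail alignment padding

int main() {}
```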


@@ -804,6 +804,7 @@ void GRenderWindow::TouchEndEvent() {
}
void GRenderWindow::InitializeCamera() {
constexpr auto camera_update_ms = std::chrono::milliseconds{50}; // (50ms, 20Hz)
if (!Settings::values.enable_ir_sensor) {
return;
}
@@ -837,7 +838,7 @@ void GRenderWindow::InitializeCamera() {
camera_timer = std::make_unique<QTimer>();
connect(camera_timer.get(), &QTimer::timeout, [this] { RequestCameraCapture(); });
// This timer should be dependent on the camera resolution: 5 ms for every 100 pixels
camera_timer->start(100);
camera_timer->start(camera_update_ms);
}
void GRenderWindow::FinalizeCamera() {
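The replacement of the hard-coded 100 with camera_update_ms relies on QTimer::start accepting std::chrono durations (available since Qt 5.8), so the 20 Hz capture interval is spelled as milliseconds{50} instead of a bare integer. A minimal standalone sketch of that overload (not yuzu code; the tick handler is illustrative):

```cpp
#include <chrono>
#include <QCoreApplication>
#include <QDebug>
#include <QTimer>

int main(int argc, char* argv[]) {
    QCoreApplication app(argc, argv);

    constexpr auto camera_update_ms = std::chrono::milliseconds{50}; // (50ms, 20Hz)
    QTimer camera_timer;
    QObject::connect(&camera_timer, &QTimer::timeout, [] { qDebug() << "capture tick"; });
    camera_timer.start(camera_update_ms); // std::chrono overload, equivalent to start(50)

    // Stop after a few ticks so the sketch terminates on its own.
    QTimer::singleShot(std::chrono::milliseconds{220}, &app, &QCoreApplication::quit);
    return app.exec();
}
```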