Merge remote-tracking branch 'upstream/master'
* upstream/master:
meson: Simplify check for _FORTIFY_SOURCE
android: camera_device: Fix requestedStream handling
meson: Don't set _FORTIFY_SOURCE for ChromeOS
ipa: rpi: Fix segfault when parsing invalid json file
ipa: rpi: agc: Use channel constraints in the AGC algorithm
ipa: rpi: agc: Add AgcChannelConstraint class
ipa: rpi: agc: Implementation of multi-channel AGC
ipa: rpi: agc: Reorganise code for multi-channel AGC
ipa: rpi: histogram: Add interBinMean()
README.rst: Drop recommendation to install meson using pip3
ipa: rpi: vc4: Make the StatisticsPtr construction clearer
ipa: rpi: agc: Split AgcStatus into AgcStatus and AgcPrepareStatus
ipa: rpi: agc: Filter exposures before dealing with digital gain
ipa: rpi: agc: Fetch AWB status in process method, not prepare
pipeline: rpi: Simplify buffer id generation
pipeline: rpi: Rename RPi::Stream::setExternalBuffer()
pipeline: rpi: Remove additional external dma buf handling logic
pipeline: rpi: Increase buffer import count to 32
libcamera: stream: Document stride will be 0 for compressed formats
ipa: rpi: imx290: Hide one frame on startup
Change-Id: I938c9b448375d0f8990b1ee17cfb756b9869d903
diff --git a/README.rst b/README.rst
index df2818e..6eed723 100644
--- a/README.rst
+++ b/README.rst
@@ -47,15 +47,7 @@
Either {g++, clang}
Meson Build system: [required]
- meson (>= 0.56) ninja-build pkg-config
-
- If your distribution doesn't provide a recent enough version of meson,
- you can install or upgrade it using pip3.
-
- .. code::
-
- pip3 install --user meson
- pip3 install --user --upgrade meson
+ meson (>= 0.57) ninja-build pkg-config
for the libcamera core: [required]
libyaml-dev python3-yaml python3-ply python3-jinja2
diff --git a/meson.build b/meson.build
index 7959b53..e9a1c7e 100644
--- a/meson.build
+++ b/meson.build
@@ -99,12 +99,18 @@
error('clang version is too old, libcamera requires 9.0 or newer')
endif
- # Turn _FORTIFY_SOURCE by default on optimised builds (as it requires -O1
- # or higher). This is needed on clang only as gcc enables it by default.
+    # Turn on _FORTIFY_SOURCE by default. This is needed on clang only as gcc
+ # enables it by default. FORTIFY will not work properly with `-O0`, and may
+ # result in macro redefinition errors if the user already has a setting for
+ # `-D_FORTIFY_SOURCE`. Do not enable FORTIFY in either of those cases.
if get_option('optimization') != '0'
- common_arguments += [
- '-D_FORTIFY_SOURCE=2',
- ]
+ fortify = cc.get_define('_FORTIFY_SOURCE')
+ if fortify == ''
+ message('Adding _FORTIFY_SOURCE')
+ common_arguments += [
+ '-D_FORTIFY_SOURCE=2',
+ ]
+ endif
endif
# Use libc++ by default if available instead of libstdc++ when compiling
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index 1f7ce44..25cedd4 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -1077,7 +1077,7 @@
descriptor->request_->addBuffer(sourceStream->stream(),
frameBuffer, nullptr);
- requestedStreams.erase(sourceStream);
+ requestedStreams.insert(sourceStream);
}
/*
diff --git a/src/ipa/rpi/cam_helper/cam_helper_imx290.cpp b/src/ipa/rpi/cam_helper/cam_helper_imx290.cpp
index 7d6f5b5..d98b51c 100644
--- a/src/ipa/rpi/cam_helper/cam_helper_imx290.cpp
+++ b/src/ipa/rpi/cam_helper/cam_helper_imx290.cpp
@@ -19,6 +19,7 @@
double gain(uint32_t gainCode) const override;
void getDelays(int &exposureDelay, int &gainDelay,
int &vblankDelay, int &hblankDelay) const override;
+ unsigned int hideFramesStartup() const override;
unsigned int hideFramesModeSwitch() const override;
private:
@@ -54,6 +55,12 @@
hblankDelay = 2;
}
+unsigned int CamHelperImx290::hideFramesStartup() const
+{
+ /* On startup, we seem to get 1 bad frame. */
+ return 1;
+}
+
unsigned int CamHelperImx290::hideFramesModeSwitch() const
{
/* After a mode switch, we seem to get 1 bad frame. */
diff --git a/src/ipa/rpi/common/ipa_base.cpp b/src/ipa/rpi/common/ipa_base.cpp
index b11f10f..f7e7ad5 100644
--- a/src/ipa/rpi/common/ipa_base.cpp
+++ b/src/ipa/rpi/common/ipa_base.cpp
@@ -699,9 +699,9 @@
}
if (ctrl.second.get<bool>() == false)
- agc->disableAuto();
+ agc->disableAuto(0);
else
- agc->enableAuto();
+ agc->enableAuto(0);
libcameraMetadata_.set(controls::AeEnable, ctrl.second.get<bool>());
break;
@@ -717,7 +717,7 @@
}
/* The control provides units of microseconds. */
- agc->setFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
+ agc->setFixedShutter(0, ctrl.second.get<int32_t>() * 1.0us);
libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
break;
@@ -732,7 +732,7 @@
break;
}
- agc->setFixedAnalogueGain(ctrl.second.get<float>());
+ agc->setFixedAnalogueGain(0, ctrl.second.get<float>());
libcameraMetadata_.set(controls::AnalogueGain,
ctrl.second.get<float>());
@@ -770,7 +770,7 @@
int32_t idx = ctrl.second.get<int32_t>();
if (ConstraintModeTable.count(idx)) {
- agc->setConstraintMode(ConstraintModeTable.at(idx));
+ agc->setConstraintMode(0, ConstraintModeTable.at(idx));
libcameraMetadata_.set(controls::AeConstraintMode, idx);
} else {
LOG(IPARPI, Error) << "Constraint mode " << idx
@@ -790,7 +790,7 @@
int32_t idx = ctrl.second.get<int32_t>();
if (ExposureModeTable.count(idx)) {
- agc->setExposureMode(ExposureModeTable.at(idx));
+ agc->setExposureMode(0, ExposureModeTable.at(idx));
libcameraMetadata_.set(controls::AeExposureMode, idx);
} else {
LOG(IPARPI, Error) << "Exposure mode " << idx
@@ -813,7 +813,7 @@
* So convert to 2^EV
*/
double ev = pow(2.0, ctrl.second.get<float>());
- agc->setEv(ev);
+ agc->setEv(0, ev);
libcameraMetadata_.set(controls::ExposureValue,
ctrl.second.get<float>());
break;
@@ -833,12 +833,12 @@
switch (mode) {
case controls::FlickerOff:
- agc->setFlickerPeriod(0us);
+ agc->setFlickerPeriod(0, 0us);
break;
case controls::FlickerManual:
- agc->setFlickerPeriod(flickerState_.manualPeriod);
+ agc->setFlickerPeriod(0, flickerState_.manualPeriod);
break;
@@ -872,7 +872,7 @@
* first, and the period updated after, or vice versa.
*/
if (flickerState_.mode == controls::FlickerManual)
- agc->setFlickerPeriod(flickerState_.manualPeriod);
+ agc->setFlickerPeriod(0, flickerState_.manualPeriod);
break;
}
@@ -1216,10 +1216,10 @@
libcameraMetadata_.set(controls::LensPosition, *deviceStatus->lensPosition);
}
- AgcStatus *agcStatus = rpiMetadata.getLocked<AgcStatus>("agc.status");
- if (agcStatus) {
- libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
- libcameraMetadata_.set(controls::DigitalGain, agcStatus->digitalGain);
+ AgcPrepareStatus *agcPrepareStatus = rpiMetadata.getLocked<AgcPrepareStatus>("agc.prepare_status");
+ if (agcPrepareStatus) {
+ libcameraMetadata_.set(controls::AeLocked, agcPrepareStatus->locked);
+ libcameraMetadata_.set(controls::DigitalGain, agcPrepareStatus->digitalGain);
}
LuxStatus *luxStatus = rpiMetadata.getLocked<LuxStatus>("lux.status");
diff --git a/src/ipa/rpi/controller/agc_algorithm.h b/src/ipa/rpi/controller/agc_algorithm.h
index b6949da..b898656 100644
--- a/src/ipa/rpi/controller/agc_algorithm.h
+++ b/src/ipa/rpi/controller/agc_algorithm.h
@@ -21,16 +21,19 @@
/* An AGC algorithm must provide the following: */
virtual unsigned int getConvergenceFrames() const = 0;
virtual std::vector<double> const &getWeights() const = 0;
- virtual void setEv(double ev) = 0;
- virtual void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) = 0;
- virtual void setFixedShutter(libcamera::utils::Duration fixedShutter) = 0;
+ virtual void setEv(unsigned int channel, double ev) = 0;
+ virtual void setFlickerPeriod(unsigned int channel,
+ libcamera::utils::Duration flickerPeriod) = 0;
+ virtual void setFixedShutter(unsigned int channel,
+ libcamera::utils::Duration fixedShutter) = 0;
virtual void setMaxShutter(libcamera::utils::Duration maxShutter) = 0;
- virtual void setFixedAnalogueGain(double fixedAnalogueGain) = 0;
+ virtual void setFixedAnalogueGain(unsigned int channel, double fixedAnalogueGain) = 0;
virtual void setMeteringMode(std::string const &meteringModeName) = 0;
- virtual void setExposureMode(std::string const &exposureModeName) = 0;
- virtual void setConstraintMode(std::string const &contraintModeName) = 0;
- virtual void enableAuto() = 0;
- virtual void disableAuto() = 0;
+ virtual void setExposureMode(unsigned int channel, std::string const &exposureModeName) = 0;
+ virtual void setConstraintMode(unsigned int channel, std::string const &contraintModeName) = 0;
+ virtual void enableAuto(unsigned int channel) = 0;
+ virtual void disableAuto(unsigned int channel) = 0;
+ virtual void setActiveChannels(const std::vector<unsigned int> &activeChannels) = 0;
};
} /* namespace RPiController */
diff --git a/src/ipa/rpi/controller/agc_status.h b/src/ipa/rpi/controller/agc_status.h
index 6c112e7..e5c4ee2 100644
--- a/src/ipa/rpi/controller/agc_status.h
+++ b/src/ipa/rpi/controller/agc_status.h
@@ -11,8 +11,10 @@
#include <libcamera/base/utils.h>
/*
- * The AGC algorithm should post the following structure into the image's
- * "agc.status" metadata.
+ * The AGC algorithm process method should post an AgcStatus into the image
+ * metadata under the tag "agc.status".
+ * The AGC algorithm prepare method should post an AgcPrepareStatus instead
+ * under "agc.prepare_status".
*/
/*
@@ -34,6 +36,10 @@
int floatingRegionEnable;
libcamera::utils::Duration fixedShutter;
double fixedAnalogueGain;
+ unsigned int channel;
+};
+
+struct AgcPrepareStatus {
double digitalGain;
int locked;
};
diff --git a/src/ipa/rpi/controller/controller.cpp b/src/ipa/rpi/controller/controller.cpp
index fa17211..14d245d 100644
--- a/src/ipa/rpi/controller/controller.cpp
+++ b/src/ipa/rpi/controller/controller.cpp
@@ -56,6 +56,9 @@
}
std::unique_ptr<YamlObject> root = YamlParser::parse(file);
+ if (!root)
+ return -EINVAL;
+
double version = (*root)["version"].get<double>(1.0);
target_ = (*root)["target"].get<std::string>("bcm2835");
diff --git a/src/ipa/rpi/controller/histogram.cpp b/src/ipa/rpi/controller/histogram.cpp
index 16a9207..0a27ba2 100644
--- a/src/ipa/rpi/controller/histogram.cpp
+++ b/src/ipa/rpi/controller/histogram.cpp
@@ -45,20 +45,26 @@
return first + frac;
}
-double Histogram::interQuantileMean(double qLo, double qHi) const
+double Histogram::interBinMean(double binLo, double binHi) const
{
- assert(qHi > qLo);
- double pLo = quantile(qLo);
- double pHi = quantile(qHi, (int)pLo);
+ assert(binHi > binLo);
double sumBinFreq = 0, cumulFreq = 0;
- for (double pNext = floor(pLo) + 1.0; pNext <= ceil(pHi);
- pLo = pNext, pNext += 1.0) {
- int bin = floor(pLo);
+ for (double binNext = floor(binLo) + 1.0; binNext <= ceil(binHi);
+ binLo = binNext, binNext += 1.0) {
+ int bin = floor(binLo);
double freq = (cumulative_[bin + 1] - cumulative_[bin]) *
- (std::min(pNext, pHi) - pLo);
+ (std::min(binNext, binHi) - binLo);
sumBinFreq += bin * freq;
cumulFreq += freq;
}
/* add 0.5 to give an average for bin mid-points */
return sumBinFreq / cumulFreq + 0.5;
}
+
+double Histogram::interQuantileMean(double qLo, double qHi) const
+{
+ assert(qHi > qLo);
+ double pLo = quantile(qLo);
+ double pHi = quantile(qHi, (int)pLo);
+ return interBinMean(pLo, pHi);
+}
diff --git a/src/ipa/rpi/controller/histogram.h b/src/ipa/rpi/controller/histogram.h
index 6b3e3a9..e2c5509 100644
--- a/src/ipa/rpi/controller/histogram.h
+++ b/src/ipa/rpi/controller/histogram.h
@@ -38,6 +38,8 @@
uint64_t total() const { return cumulative_[cumulative_.size() - 1]; }
/* Cumulative frequency up to a (fractional) point in a bin. */
uint64_t cumulativeFreq(double bin) const;
+ /* Return the mean value between two (fractional) bins. */
+ double interBinMean(double binLo, double binHi) const;
/*
* Return the (fractional) bin of the point q (0 <= q <= 1) through the
* histogram. Optionally provide limits to help.
diff --git a/src/ipa/rpi/controller/meson.build b/src/ipa/rpi/controller/meson.build
index feb0334..20b9cda 100644
--- a/src/ipa/rpi/controller/meson.build
+++ b/src/ipa/rpi/controller/meson.build
@@ -8,6 +8,7 @@
'pwl.cpp',
'rpi/af.cpp',
'rpi/agc.cpp',
+ 'rpi/agc_channel.cpp',
'rpi/alsc.cpp',
'rpi/awb.cpp',
'rpi/black_level.cpp',
diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp
index ae9ff21..870cb43 100644
--- a/src/ipa/rpi/controller/rpi/agc.cpp
+++ b/src/ipa/rpi/controller/rpi/agc.cpp
@@ -5,20 +5,12 @@
* agc.cpp - AGC/AEC control algorithm
*/
-#include <algorithm>
-#include <map>
-#include <tuple>
+#include "agc.h"
#include <libcamera/base/log.h>
-#include "../awb_status.h"
-#include "../device_status.h"
-#include "../histogram.h"
-#include "../lux_status.h"
#include "../metadata.h"
-#include "agc.h"
-
using namespace RPiController;
using namespace libcamera;
using libcamera::utils::Duration;
@@ -28,206 +20,10 @@
#define NAME "rpi.agc"
-int AgcMeteringMode::read(const libcamera::YamlObject ¶ms)
-{
- const YamlObject &yamlWeights = params["weights"];
-
- for (const auto &p : yamlWeights.asList()) {
- auto value = p.get<double>();
- if (!value)
- return -EINVAL;
- weights.push_back(*value);
- }
-
- return 0;
-}
-
-static std::tuple<int, std::string>
-readMeteringModes(std::map<std::string, AgcMeteringMode> &metering_modes,
- const libcamera::YamlObject ¶ms)
-{
- std::string first;
- int ret;
-
- for (const auto &[key, value] : params.asDict()) {
- AgcMeteringMode meteringMode;
- ret = meteringMode.read(value);
- if (ret)
- return { ret, {} };
-
- metering_modes[key] = std::move(meteringMode);
- if (first.empty())
- first = key;
- }
-
- return { 0, first };
-}
-
-int AgcExposureMode::read(const libcamera::YamlObject ¶ms)
-{
- auto value = params["shutter"].getList<double>();
- if (!value)
- return -EINVAL;
- std::transform(value->begin(), value->end(), std::back_inserter(shutter),
- [](double v) { return v * 1us; });
-
- value = params["gain"].getList<double>();
- if (!value)
- return -EINVAL;
- gain = std::move(*value);
-
- if (shutter.size() < 2 || gain.size() < 2) {
- LOG(RPiAgc, Error)
- << "AgcExposureMode: must have at least two entries in exposure profile";
- return -EINVAL;
- }
-
- if (shutter.size() != gain.size()) {
- LOG(RPiAgc, Error)
- << "AgcExposureMode: expect same number of exposure and gain entries in exposure profile";
- return -EINVAL;
- }
-
- return 0;
-}
-
-static std::tuple<int, std::string>
-readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
- const libcamera::YamlObject ¶ms)
-{
- std::string first;
- int ret;
-
- for (const auto &[key, value] : params.asDict()) {
- AgcExposureMode exposureMode;
- ret = exposureMode.read(value);
- if (ret)
- return { ret, {} };
-
- exposureModes[key] = std::move(exposureMode);
- if (first.empty())
- first = key;
- }
-
- return { 0, first };
-}
-
-int AgcConstraint::read(const libcamera::YamlObject ¶ms)
-{
- std::string boundString = params["bound"].get<std::string>("");
- transform(boundString.begin(), boundString.end(),
- boundString.begin(), ::toupper);
- if (boundString != "UPPER" && boundString != "LOWER") {
- LOG(RPiAgc, Error) << "AGC constraint type should be UPPER or LOWER";
- return -EINVAL;
- }
- bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
-
- auto value = params["q_lo"].get<double>();
- if (!value)
- return -EINVAL;
- qLo = *value;
-
- value = params["q_hi"].get<double>();
- if (!value)
- return -EINVAL;
- qHi = *value;
-
- return yTarget.read(params["y_target"]);
-}
-
-static std::tuple<int, AgcConstraintMode>
-readConstraintMode(const libcamera::YamlObject ¶ms)
-{
- AgcConstraintMode mode;
- int ret;
-
- for (const auto &p : params.asList()) {
- AgcConstraint constraint;
- ret = constraint.read(p);
- if (ret)
- return { ret, {} };
-
- mode.push_back(std::move(constraint));
- }
-
- return { 0, mode };
-}
-
-static std::tuple<int, std::string>
-readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
- const libcamera::YamlObject ¶ms)
-{
- std::string first;
- int ret;
-
- for (const auto &[key, value] : params.asDict()) {
- std::tie(ret, constraintModes[key]) = readConstraintMode(value);
- if (ret)
- return { ret, {} };
-
- if (first.empty())
- first = key;
- }
-
- return { 0, first };
-}
-
-int AgcConfig::read(const libcamera::YamlObject ¶ms)
-{
- LOG(RPiAgc, Debug) << "AgcConfig";
- int ret;
-
- std::tie(ret, defaultMeteringMode) =
- readMeteringModes(meteringModes, params["metering_modes"]);
- if (ret)
- return ret;
- std::tie(ret, defaultExposureMode) =
- readExposureModes(exposureModes, params["exposure_modes"]);
- if (ret)
- return ret;
- std::tie(ret, defaultConstraintMode) =
- readConstraintModes(constraintModes, params["constraint_modes"]);
- if (ret)
- return ret;
-
- ret = yTarget.read(params["y_target"]);
- if (ret)
- return ret;
-
- speed = params["speed"].get<double>(0.2);
- startupFrames = params["startup_frames"].get<uint16_t>(10);
- convergenceFrames = params["convergence_frames"].get<unsigned int>(6);
- fastReduceThreshold = params["fast_reduce_threshold"].get<double>(0.4);
- baseEv = params["base_ev"].get<double>(1.0);
-
- /* Start with quite a low value as ramping up is easier than ramping down. */
- defaultExposureTime = params["default_exposure_time"].get<double>(1000) * 1us;
- defaultAnalogueGain = params["default_analogue_gain"].get<double>(1.0);
-
- return 0;
-}
-
-Agc::ExposureValues::ExposureValues()
- : shutter(0s), analogueGain(0),
- totalExposure(0s), totalExposureNoDG(0s)
-{
-}
-
Agc::Agc(Controller *controller)
- : AgcAlgorithm(controller), meteringMode_(nullptr),
- exposureMode_(nullptr), constraintMode_(nullptr),
- frameCount_(0), lockCount_(0),
- lastTargetExposure_(0s), ev_(1.0), flickerPeriod_(0s),
- maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
+ : AgcAlgorithm(controller),
+ activeChannels_({ 0 }), index_(0)
{
- memset(&awb_, 0, sizeof(awb_));
- /*
- * Setting status_.totalExposureValue_ to zero initially tells us
- * it's not been calculated yet (i.e. Process hasn't yet run).
- */
- status_ = {};
- status_.ev = ev_;
}
char const *Agc::name() const
@@ -237,689 +33,300 @@
int Agc::read(const libcamera::YamlObject ¶ms)
{
- LOG(RPiAgc, Debug) << "Agc";
-
- int ret = config_.read(params);
- if (ret)
- return ret;
-
- const Size &size = getHardwareConfig().agcZoneWeights;
- for (auto const &modes : config_.meteringModes) {
- if (modes.second.weights.size() != size.width * size.height) {
- LOG(RPiAgc, Error) << "AgcMeteringMode: Incorrect number of weights";
- return -EINVAL;
- }
+ /*
+ * When there is only a single channel we can read the old style syntax.
+ * Otherwise we expect a "channels" keyword followed by a list of configurations.
+ */
+ if (!params.contains("channels")) {
+ LOG(RPiAgc, Debug) << "Single channel only";
+ channelTotalExposures_.resize(1, 0s);
+ channelData_.emplace_back();
+ return channelData_.back().channel.read(params, getHardwareConfig());
}
- /*
- * Set the config's defaults (which are the first ones it read) as our
- * current modes, until someone changes them. (they're all known to
- * exist at this point)
- */
- meteringModeName_ = config_.defaultMeteringMode;
- meteringMode_ = &config_.meteringModes[meteringModeName_];
- exposureModeName_ = config_.defaultExposureMode;
- exposureMode_ = &config_.exposureModes[exposureModeName_];
- constraintModeName_ = config_.defaultConstraintMode;
- constraintMode_ = &config_.constraintModes[constraintModeName_];
- /* Set up the "last shutter/gain" values, in case AGC starts "disabled". */
- status_.shutterTime = config_.defaultExposureTime;
- status_.analogueGain = config_.defaultAnalogueGain;
+ const auto &channels = params["channels"].asList();
+ for (auto ch = channels.begin(); ch != channels.end(); ch++) {
+ LOG(RPiAgc, Debug) << "Read AGC channel";
+ channelData_.emplace_back();
+ int ret = channelData_.back().channel.read(*ch, getHardwareConfig());
+ if (ret)
+ return ret;
+ }
+
+ LOG(RPiAgc, Debug) << "Read " << channelData_.size() << " channel(s)";
+ if (channelData_.empty()) {
+ LOG(RPiAgc, Error) << "No AGC channels provided";
+ return -1;
+ }
+
+ channelTotalExposures_.resize(channelData_.size(), 0s);
+
return 0;
}
-void Agc::disableAuto()
+int Agc::checkChannel(unsigned int channelIndex) const
{
- fixedShutter_ = status_.shutterTime;
- fixedAnalogueGain_ = status_.analogueGain;
+ if (channelIndex >= channelData_.size()) {
+ LOG(RPiAgc, Warning) << "AGC channel " << channelIndex << " not available";
+ return -1;
+ }
+
+ return 0;
}
-void Agc::enableAuto()
+void Agc::disableAuto(unsigned int channelIndex)
{
- fixedShutter_ = 0s;
- fixedAnalogueGain_ = 0;
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "disableAuto for channel " << channelIndex;
+ channelData_[channelIndex].channel.disableAuto();
+}
+
+void Agc::enableAuto(unsigned int channelIndex)
+{
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "enableAuto for channel " << channelIndex;
+ channelData_[channelIndex].channel.enableAuto();
}
unsigned int Agc::getConvergenceFrames() const
{
- /*
- * If shutter and gain have been explicitly set, there is no
- * convergence to happen, so no need to drop any frames - return zero.
- */
- if (fixedShutter_ && fixedAnalogueGain_)
- return 0;
- else
- return config_.convergenceFrames;
+ /* If there are n channels, it presumably takes n times as long to converge. */
+ return channelData_[0].channel.getConvergenceFrames() * activeChannels_.size();
}
std::vector<double> const &Agc::getWeights() const
{
/*
- * In case someone calls setMeteringMode and then this before the
- * algorithm has run and updated the meteringMode_ pointer.
+ * In future the metering weights may be determined differently, making it
+ * difficult to associate different sets of weight with different channels.
+ * Therefore we shall impose a limitation, at least for now, that all
+ * channels will use the same weights.
*/
- auto it = config_.meteringModes.find(meteringModeName_);
- if (it == config_.meteringModes.end())
- return meteringMode_->weights;
- return it->second.weights;
+ return channelData_[0].channel.getWeights();
}
-void Agc::setEv(double ev)
+void Agc::setEv(unsigned int channelIndex, double ev)
{
- ev_ = ev;
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "setEv " << ev << " for channel " << channelIndex;
+ channelData_[channelIndex].channel.setEv(ev);
}
-void Agc::setFlickerPeriod(Duration flickerPeriod)
+void Agc::setFlickerPeriod(unsigned int channelIndex, Duration flickerPeriod)
{
- flickerPeriod_ = flickerPeriod;
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "setFlickerPeriod " << flickerPeriod
+ << " for channel " << channelIndex;
+ channelData_[channelIndex].channel.setFlickerPeriod(flickerPeriod);
}
void Agc::setMaxShutter(Duration maxShutter)
{
- maxShutter_ = maxShutter;
+ /* Frame durations will be the same across all channels too. */
+ for (auto &data : channelData_)
+ data.channel.setMaxShutter(maxShutter);
}
-void Agc::setFixedShutter(Duration fixedShutter)
+void Agc::setFixedShutter(unsigned int channelIndex, Duration fixedShutter)
{
- fixedShutter_ = fixedShutter;
- /* Set this in case someone calls disableAuto() straight after. */
- status_.shutterTime = limitShutter(fixedShutter_);
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "setFixedShutter " << fixedShutter
+ << " for channel " << channelIndex;
+ channelData_[channelIndex].channel.setFixedShutter(fixedShutter);
}
-void Agc::setFixedAnalogueGain(double fixedAnalogueGain)
+void Agc::setFixedAnalogueGain(unsigned int channelIndex, double fixedAnalogueGain)
{
- fixedAnalogueGain_ = fixedAnalogueGain;
- /* Set this in case someone calls disableAuto() straight after. */
- status_.analogueGain = limitGain(fixedAnalogueGain);
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "setFixedAnalogueGain " << fixedAnalogueGain
+ << " for channel " << channelIndex;
+ channelData_[channelIndex].channel.setFixedAnalogueGain(fixedAnalogueGain);
}
void Agc::setMeteringMode(std::string const &meteringModeName)
{
- meteringModeName_ = meteringModeName;
+ /* Metering modes will be the same across all channels too. */
+ for (auto &data : channelData_)
+ data.channel.setMeteringMode(meteringModeName);
}
-void Agc::setExposureMode(std::string const &exposureModeName)
+void Agc::setExposureMode(unsigned int channelIndex, std::string const &exposureModeName)
{
- exposureModeName_ = exposureModeName;
+ if (checkChannel(channelIndex))
+ return;
+
+ LOG(RPiAgc, Debug) << "setExposureMode " << exposureModeName
+ << " for channel " << channelIndex;
+ channelData_[channelIndex].channel.setExposureMode(exposureModeName);
}
-void Agc::setConstraintMode(std::string const &constraintModeName)
+void Agc::setConstraintMode(unsigned int channelIndex, std::string const &constraintModeName)
{
- constraintModeName_ = constraintModeName;
+ if (checkChannel(channelIndex))
+ return;
+
+ channelData_[channelIndex].channel.setConstraintMode(constraintModeName);
+}
+
+template<typename T>
+std::ostream &operator<<(std::ostream &os, const std::vector<T> &v)
+{
+ os << "{";
+ for (const auto &e : v)
+ os << " " << e;
+ os << " }";
+ return os;
+}
+
+void Agc::setActiveChannels(const std::vector<unsigned int> &activeChannels)
+{
+ if (activeChannels.empty()) {
+ LOG(RPiAgc, Warning) << "No active AGC channels supplied";
+ return;
+ }
+
+ for (auto index : activeChannels)
+ if (checkChannel(index))
+ return;
+
+ LOG(RPiAgc, Debug) << "setActiveChannels " << activeChannels;
+ activeChannels_ = activeChannels;
}
void Agc::switchMode(CameraMode const &cameraMode,
Metadata *metadata)
{
- /* AGC expects the mode sensitivity always to be non-zero. */
- ASSERT(cameraMode.sensitivity);
-
- housekeepConfig();
-
/*
- * Store the mode in the local state. We must cache the sensitivity of
- * of the previous mode for the calculations below.
+ * We run switchMode on every channel, and then we're going to start over
+ * with the first active channel again which means that this channel's
+ * status needs to be the one we leave in the metadata.
*/
- double lastSensitivity = mode_.sensitivity;
- mode_ = cameraMode;
+ AgcStatus status;
- Duration fixedShutter = limitShutter(fixedShutter_);
- if (fixedShutter && fixedAnalogueGain_) {
- /* We're going to reset the algorithm here with these fixed values. */
-
- fetchAwbStatus(metadata);
- double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
- ASSERT(minColourGain != 0.0);
-
- /* This is the equivalent of computeTargetExposure and applyDigitalGain. */
- target_.totalExposureNoDG = fixedShutter_ * fixedAnalogueGain_;
- target_.totalExposure = target_.totalExposureNoDG / minColourGain;
-
- /* Equivalent of filterExposure. This resets any "history". */
- filtered_ = target_;
-
- /* Equivalent of divideUpExposure. */
- filtered_.shutter = fixedShutter;
- filtered_.analogueGain = fixedAnalogueGain_;
- } else if (status_.totalExposureValue) {
- /*
- * On a mode switch, various things could happen:
- * - the exposure profile might change
- * - a fixed exposure or gain might be set
- * - the new mode's sensitivity might be different
- * We cope with the last of these by scaling the target values. After
- * that we just need to re-divide the exposure/gain according to the
- * current exposure profile, which takes care of everything else.
- */
-
- double ratio = lastSensitivity / cameraMode.sensitivity;
- target_.totalExposureNoDG *= ratio;
- target_.totalExposure *= ratio;
- filtered_.totalExposureNoDG *= ratio;
- filtered_.totalExposure *= ratio;
-
- divideUpExposure();
- } else {
- /*
- * We come through here on startup, when at least one of the shutter
- * or gain has not been fixed. We must still write those values out so
- * that they will be applied immediately. We supply some arbitrary defaults
- * for any that weren't set.
- */
-
- /* Equivalent of divideUpExposure. */
- filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
- filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
+ for (unsigned int channelIndex = 0; channelIndex < channelData_.size(); channelIndex++) {
+ LOG(RPiAgc, Debug) << "switchMode for channel " << channelIndex;
+ channelData_[channelIndex].channel.switchMode(cameraMode, metadata);
+ if (channelIndex == activeChannels_[0])
+ metadata->get("agc.status", status);
}
- writeAndFinish(metadata, false);
+ status.channel = activeChannels_[0];
+ metadata->set("agc.status", status);
+ index_ = 0;
+}
+
+static void getDelayedChannelIndex(Metadata *metadata, const char *message, unsigned int &channelIndex)
+{
+ std::unique_lock<RPiController::Metadata> lock(*metadata);
+ AgcStatus *status = metadata->getLocked<AgcStatus>("agc.delayed_status");
+ if (status)
+ channelIndex = status->channel;
+ else {
+ /* This does happen at startup, otherwise it would be a Warning or Error. */
+ LOG(RPiAgc, Debug) << message;
+ }
+}
+
+static libcamera::utils::Duration
+setCurrentChannelIndexGetExposure(Metadata *metadata, const char *message, unsigned int channelIndex)
+{
+ std::unique_lock<RPiController::Metadata> lock(*metadata);
+ AgcStatus *status = metadata->getLocked<AgcStatus>("agc.status");
+ libcamera::utils::Duration dur = 0s;
+
+ if (status) {
+ status->channel = channelIndex;
+ dur = status->totalExposureValue;
+ } else {
+ /* This does happen at startup, otherwise it would be a Warning or Error. */
+ LOG(RPiAgc, Debug) << message;
+ }
+
+ return dur;
}
void Agc::prepare(Metadata *imageMetadata)
{
- Duration totalExposureValue = status_.totalExposureValue;
- AgcStatus delayedStatus;
+ /*
+ * The DeviceStatus in the metadata should be correct for the image we
+ * are processing. The delayed status should tell us what channel this frame
+ * was from, so we will use that channel's prepare method.
+ *
+ * \todo To be honest, there's not much that's stateful in the prepare methods
+ * so we should perhaps re-evaluate whether prepare even needs to be done
+ * "per channel".
+ */
+ unsigned int channelIndex = activeChannels_[0];
+ getDelayedChannelIndex(imageMetadata, "prepare: no delayed status", channelIndex);
- if (!imageMetadata->get("agc.delayed_status", delayedStatus))
- totalExposureValue = delayedStatus.totalExposureValue;
-
- status_.digitalGain = 1.0;
- fetchAwbStatus(imageMetadata); /* always fetch it so that Process knows it's been done */
-
- if (status_.totalExposureValue) {
- /* Process has run, so we have meaningful values. */
- DeviceStatus deviceStatus;
- if (imageMetadata->get("device.status", deviceStatus) == 0) {
- Duration actualExposure = deviceStatus.shutterSpeed *
- deviceStatus.analogueGain;
- if (actualExposure) {
- status_.digitalGain = totalExposureValue / actualExposure;
- LOG(RPiAgc, Debug) << "Want total exposure " << totalExposureValue;
- /*
- * Never ask for a gain < 1.0, and also impose
- * some upper limit. Make it customisable?
- */
- status_.digitalGain = std::max(1.0, std::min(status_.digitalGain, 4.0));
- LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
- LOG(RPiAgc, Debug) << "Use digitalGain " << status_.digitalGain;
- LOG(RPiAgc, Debug) << "Effective exposure "
- << actualExposure * status_.digitalGain;
- /* Decide whether AEC/AGC has converged. */
- updateLockStatus(deviceStatus);
- }
- } else
- LOG(RPiAgc, Warning) << name() << ": no device metadata";
- imageMetadata->set("agc.status", status_);
- }
+ LOG(RPiAgc, Debug) << "prepare for channel " << channelIndex;
+ channelData_[channelIndex].channel.prepare(imageMetadata);
}
void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
- frameCount_++;
/*
- * First a little bit of housekeeping, fetching up-to-date settings and
- * configuration, that kind of thing.
+ * We want to generate values for the next channel in round robin fashion
+ * (i.e. the channel at location index_ in the activeChannel list), even though
+ * the statistics we have will be for a different channel (which we find
+ * again from the delayed status).
*/
- housekeepConfig();
- /* Get the current exposure values for the frame that's just arrived. */
- fetchCurrentExposure(imageMetadata);
- /* Compute the total gain we require relative to the current exposure. */
- double gain, targetY;
- computeGain(stats, imageMetadata, gain, targetY);
- /* Now compute the target (final) exposure which we think we want. */
- computeTargetExposure(gain);
- /*
- * Some of the exposure has to be applied as digital gain, so work out
- * what that is. This function also tells us whether it's decided to
- * "desaturate" the image more quickly.
- */
- bool desaturate = applyDigitalGain(gain, targetY);
- /* The results have to be filtered so as not to change too rapidly. */
- filterExposure(desaturate);
- /*
- * The last thing is to divide up the exposure value into a shutter time
- * and analogue gain, according to the current exposure mode.
- */
- divideUpExposure();
- /* Finally advertise what we've done. */
- writeAndFinish(imageMetadata, desaturate);
-}
-void Agc::updateLockStatus(DeviceStatus const &deviceStatus)
-{
- const double errorFactor = 0.10; /* make these customisable? */
- const int maxLockCount = 5;
- /* Reset "lock count" when we exceed this multiple of errorFactor */
- const double resetMargin = 1.5;
-
- /* Add 200us to the exposure time error to allow for line quantisation. */
- Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
- double gainError = lastDeviceStatus_.analogueGain * errorFactor;
- Duration targetError = lastTargetExposure_ * errorFactor;
+ /* Generate updated AGC values for channel for new channel that we are requesting. */
+ unsigned int channelIndex = activeChannels_[index_];
+ AgcChannelData &channelData = channelData_[channelIndex];
+ /* The stats that arrived with this image correspond to the following channel. */
+ unsigned int statsIndex = 0;
+ getDelayedChannelIndex(imageMetadata, "process: no delayed status for stats", statsIndex);
+ LOG(RPiAgc, Debug) << "process for channel " << channelIndex;
/*
- * Note that we don't know the exposure/gain limits of the sensor, so
- * the values we keep requesting may be unachievable. For this reason
- * we only insist that we're close to values in the past few frames.
+ * We keep a cache of the most recent DeviceStatus and stats for each channel,
+ * so that we can invoke the next channel's process method with the most up to date
+ * values.
*/
- if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
- deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
- deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
- deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
- status_.targetExposureValue > lastTargetExposure_ - targetError &&
- status_.targetExposureValue < lastTargetExposure_ + targetError)
- lockCount_ = std::min(lockCount_ + 1, maxLockCount);
- else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
- deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
- deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
- deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
- status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
- status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
- lockCount_ = 0;
-
- lastDeviceStatus_ = deviceStatus;
- lastTargetExposure_ = status_.targetExposureValue;
-
- LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
- status_.locked = lockCount_ == maxLockCount;
-}
-
-void Agc::housekeepConfig()
-{
- /* First fetch all the up-to-date settings, so no one else has to do it. */
- status_.ev = ev_;
- status_.fixedShutter = limitShutter(fixedShutter_);
- status_.fixedAnalogueGain = fixedAnalogueGain_;
- status_.flickerPeriod = flickerPeriod_;
- LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
- << status_.fixedShutter << " fixedAnalogueGain "
- << status_.fixedAnalogueGain;
- /*
- * Make sure the "mode" pointers point to the up-to-date things, if
- * they've changed.
- */
- if (meteringModeName_ != status_.meteringMode) {
- auto it = config_.meteringModes.find(meteringModeName_);
- if (it == config_.meteringModes.end()) {
- LOG(RPiAgc, Warning) << "No metering mode " << meteringModeName_;
- meteringModeName_ = status_.meteringMode;
- } else {
- meteringMode_ = &it->second;
- status_.meteringMode = meteringModeName_;
- }
- }
- if (exposureModeName_ != status_.exposureMode) {
- auto it = config_.exposureModes.find(exposureModeName_);
- if (it == config_.exposureModes.end()) {
- LOG(RPiAgc, Warning) << "No exposure profile " << exposureModeName_;
- exposureModeName_ = status_.exposureMode;
- } else {
- exposureMode_ = &it->second;
- status_.exposureMode = exposureModeName_;
- }
- }
- if (constraintModeName_ != status_.constraintMode) {
- auto it = config_.constraintModes.find(constraintModeName_);
- if (it == config_.constraintModes.end()) {
- LOG(RPiAgc, Warning) << "No constraint list " << constraintModeName_;
- constraintModeName_ = status_.constraintMode;
- } else {
- constraintMode_ = &it->second;
- status_.constraintMode = constraintModeName_;
- }
- }
- LOG(RPiAgc, Debug) << "exposureMode "
- << exposureModeName_ << " constraintMode "
- << constraintModeName_ << " meteringMode "
- << meteringModeName_;
-}
-
-void Agc::fetchCurrentExposure(Metadata *imageMetadata)
-{
- std::unique_lock<Metadata> lock(*imageMetadata);
- DeviceStatus *deviceStatus =
- imageMetadata->getLocked<DeviceStatus>("device.status");
- if (!deviceStatus)
- LOG(RPiAgc, Fatal) << "No device metadata";
- current_.shutter = deviceStatus->shutterSpeed;
- current_.analogueGain = deviceStatus->analogueGain;
- AgcStatus *agcStatus =
- imageMetadata->getLocked<AgcStatus>("agc.status");
- current_.totalExposure = agcStatus ? agcStatus->totalExposureValue : 0s;
- current_.totalExposureNoDG = current_.shutter * current_.analogueGain;
-}
-
-void Agc::fetchAwbStatus(Metadata *imageMetadata)
-{
- awb_.gainR = 1.0; /* in case not found in metadata */
- awb_.gainG = 1.0;
- awb_.gainB = 1.0;
- if (imageMetadata->get("awb.status", awb_) != 0)
- LOG(RPiAgc, Debug) << "No AWB status found";
-}
-
-static double computeInitialY(StatisticsPtr &stats, AwbStatus const &awb,
- std::vector<double> &weights, double gain)
-{
- constexpr uint64_t maxVal = 1 << Statistics::NormalisationFactorPow2;
-
- ASSERT(weights.size() == stats->agcRegions.numRegions());
+ LOG(RPiAgc, Debug) << "Save DeviceStatus and stats for channel " << statsIndex;
+ DeviceStatus deviceStatus;
+ if (imageMetadata->get<DeviceStatus>("device.status", deviceStatus) == 0)
+ channelData_[statsIndex].deviceStatus = deviceStatus;
+ else
+ /* Every frame should have a DeviceStatus. */
+ LOG(RPiAgc, Error) << "process: no device status found";
+ channelData_[statsIndex].statistics = stats;
/*
- * Note that the weights are applied by the IPA to the statistics directly,
- * before they are given to us here.
+ * Finally fetch the most recent DeviceStatus and stats for the new channel, if both
+ * exist, and call process(). We must make the agc.status metadata record correctly
+ * which channel this is.
*/
- double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
- for (unsigned int i = 0; i < stats->agcRegions.numRegions(); i++) {
- auto ®ion = stats->agcRegions.get(i);
- rSum += std::min<double>(region.val.rSum * gain, (maxVal - 1) * region.counted);
- gSum += std::min<double>(region.val.gSum * gain, (maxVal - 1) * region.counted);
- bSum += std::min<double>(region.val.bSum * gain, (maxVal - 1) * region.counted);
- pixelSum += region.counted;
- }
- if (pixelSum == 0.0) {
- LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
- return 0;
- }
- double ySum = rSum * awb.gainR * .299 +
- gSum * awb.gainG * .587 +
- bSum * awb.gainB * .114;
- return ySum / pixelSum / maxVal;
-}
-
-/*
- * We handle extra gain through EV by adjusting our Y targets. However, you
- * simply can't monitor histograms once they get very close to (or beyond!)
- * saturation, so we clamp the Y targets to this value. It does mean that EV
- * increases don't necessarily do quite what you might expect in certain
- * (contrived) cases.
- */
-
-static constexpr double EvGainYTargetLimit = 0.9;
-
-static double constraintComputeGain(AgcConstraint &c, const Histogram &h, double lux,
- double evGain, double &targetY)
-{
- targetY = c.yTarget.eval(c.yTarget.domain().clip(lux));
- targetY = std::min(EvGainYTargetLimit, targetY * evGain);
- double iqm = h.interQuantileMean(c.qLo, c.qHi);
- return (targetY * h.bins()) / iqm;
-}
-
-void Agc::computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
- double &gain, double &targetY)
-{
- struct LuxStatus lux = {};
- lux.lux = 400; /* default lux level to 400 in case no metadata found */
- if (imageMetadata->get("lux.status", lux) != 0)
- LOG(RPiAgc, Warning) << "No lux level found";
- const Histogram &h = statistics->yHist;
- double evGain = status_.ev * config_.baseEv;
- /*
- * The initial gain and target_Y come from some of the regions. After
- * that we consider the histogram constraints.
- */
- targetY = config_.yTarget.eval(config_.yTarget.domain().clip(lux.lux));
- targetY = std::min(EvGainYTargetLimit, targetY * evGain);
-
- /*
- * Do this calculation a few times as brightness increase can be
- * non-linear when there are saturated regions.
- */
- gain = 1.0;
- for (int i = 0; i < 8; i++) {
- double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
- double extraGain = std::min(10.0, targetY / (initialY + .001));
- gain *= extraGain;
- LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
- << " gives gain " << gain;
- if (extraGain < 1.01) /* close enough */
- break;
- }
-
- for (auto &c : *constraintMode_) {
- double newTargetY;
- double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
- LOG(RPiAgc, Debug) << "Constraint has target_Y "
- << newTargetY << " giving gain " << newGain;
- if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
- LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
- gain = newGain;
- targetY = newTargetY;
- } else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
- LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
- gain = newGain;
- targetY = newTargetY;
- }
- }
- LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
- << status_.ev << " base_ev " << config_.baseEv
- << ")";
-}
-
-void Agc::computeTargetExposure(double gain)
-{
- if (status_.fixedShutter && status_.fixedAnalogueGain) {
- /*
- * When ag and shutter are both fixed, we need to drive the
- * total exposure so that we end up with a digital gain of at least
- * 1/minColourGain. Otherwise we'd desaturate channels causing
- * white to go cyan or magenta.
- */
- double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
- ASSERT(minColourGain != 0.0);
- target_.totalExposure =
- status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
+ if (channelData.statistics && channelData.deviceStatus) {
+ deviceStatus = *channelData.deviceStatus;
+ stats = channelData.statistics;
} else {
- /*
- * The statistics reflect the image without digital gain, so the final
- * total exposure we're aiming for is:
- */
- target_.totalExposure = current_.totalExposureNoDG * gain;
- /* The final target exposure is also limited to what the exposure mode allows. */
- Duration maxShutter = status_.fixedShutter
- ? status_.fixedShutter
- : exposureMode_->shutter.back();
- maxShutter = limitShutter(maxShutter);
- Duration maxTotalExposure =
- maxShutter *
- (status_.fixedAnalogueGain != 0.0
- ? status_.fixedAnalogueGain
- : exposureMode_->gain.back());
- target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
+ /* Can also happen when new channels start. */
+ LOG(RPiAgc, Debug) << "process: channel " << channelIndex << " not seen yet";
}
- LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
-}
-bool Agc::applyDigitalGain(double gain, double targetY)
-{
- double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
- ASSERT(minColourGain != 0.0);
- double dg = 1.0 / minColourGain;
- /*
- * I think this pipeline subtracts black level and rescales before we
- * get the stats, so no need to worry about it.
- */
- LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
- << " target_Y " << targetY;
- /*
- * Finally, if we're trying to reduce exposure but the target_Y is
- * "close" to 1.0, then the gain computed for that constraint will be
- * only slightly less than one, because the measured Y can never be
- * larger than 1.0. When this happens, demand a large digital gain so
- * that the exposure can be reduced, de-saturating the image much more
- * quickly (and we then approach the correct value more quickly from
- * below).
- */
- bool desaturate = targetY > config_.fastReduceThreshold &&
- gain < sqrt(targetY);
- if (desaturate)
- dg /= config_.fastReduceThreshold;
- LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
- target_.totalExposureNoDG = target_.totalExposure / dg;
- LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << target_.totalExposureNoDG;
- return desaturate;
-}
+ channelData.channel.process(stats, deviceStatus, imageMetadata, channelTotalExposures_);
+ auto dur = setCurrentChannelIndexGetExposure(imageMetadata, "process: no AGC status found",
+ channelIndex);
+ if (dur)
+ channelTotalExposures_[channelIndex] = dur;
-void Agc::filterExposure(bool desaturate)
-{
- double speed = config_.speed;
- /*
- * AGC adapts instantly if both shutter and gain are directly specified
- * or we're in the startup phase.
- */
- if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
- frameCount_ <= config_.startupFrames)
- speed = 1.0;
- if (!filtered_.totalExposure) {
- filtered_.totalExposure = target_.totalExposure;
- filtered_.totalExposureNoDG = target_.totalExposureNoDG;
- } else {
- /*
- * If close to the result go faster, to save making so many
- * micro-adjustments on the way. (Make this customisable?)
- */
- if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
- filtered_.totalExposure > 0.8 * target_.totalExposure)
- speed = sqrt(speed);
- filtered_.totalExposure = speed * target_.totalExposure +
- filtered_.totalExposure * (1.0 - speed);
- /*
- * When desaturing, take a big jump down in totalExposureNoDG,
- * which we'll hide with digital gain.
- */
- if (desaturate)
- filtered_.totalExposureNoDG =
- target_.totalExposureNoDG;
- else
- filtered_.totalExposureNoDG =
- speed * target_.totalExposureNoDG +
- filtered_.totalExposureNoDG * (1.0 - speed);
- }
- /*
- * We can't let the totalExposureNoDG exposure deviate too far below the
- * total exposure, as there might not be enough digital gain available
- * in the ISP to hide it (which will cause nasty oscillation).
- */
- if (filtered_.totalExposureNoDG <
- filtered_.totalExposure * config_.fastReduceThreshold)
- filtered_.totalExposureNoDG = filtered_.totalExposure * config_.fastReduceThreshold;
- LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
- << " no dg " << filtered_.totalExposureNoDG;
-}
-
-void Agc::divideUpExposure()
-{
- /*
- * Sending the fixed shutter/gain cases through the same code may seem
- * unnecessary, but it will make more sense when extend this to cover
- * variable aperture.
- */
- Duration exposureValue = filtered_.totalExposureNoDG;
- Duration shutterTime;
- double analogueGain;
- shutterTime = status_.fixedShutter ? status_.fixedShutter
- : exposureMode_->shutter[0];
- shutterTime = limitShutter(shutterTime);
- analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
- : exposureMode_->gain[0];
- analogueGain = limitGain(analogueGain);
- if (shutterTime * analogueGain < exposureValue) {
- for (unsigned int stage = 1;
- stage < exposureMode_->gain.size(); stage++) {
- if (!status_.fixedShutter) {
- Duration stageShutter =
- limitShutter(exposureMode_->shutter[stage]);
- if (stageShutter * analogueGain >= exposureValue) {
- shutterTime = exposureValue / analogueGain;
- break;
- }
- shutterTime = stageShutter;
- }
- if (status_.fixedAnalogueGain == 0.0) {
- if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
- analogueGain = exposureValue / shutterTime;
- break;
- }
- analogueGain = exposureMode_->gain[stage];
- analogueGain = limitGain(analogueGain);
- }
- }
- }
- LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
- << analogueGain;
- /*
- * Finally adjust shutter time for flicker avoidance (require both
- * shutter and gain not to be fixed).
- */
- if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
- status_.flickerPeriod) {
- int flickerPeriods = shutterTime / status_.flickerPeriod;
- if (flickerPeriods) {
- Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
- analogueGain *= shutterTime / newShutterTime;
- /*
- * We should still not allow the ag to go over the
- * largest value in the exposure mode. Note that this
- * may force more of the total exposure into the digital
- * gain as a side-effect.
- */
- analogueGain = std::min(analogueGain, exposureMode_->gain.back());
- analogueGain = limitGain(analogueGain);
- shutterTime = newShutterTime;
- }
- LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
- << shutterTime << " gain " << analogueGain;
- }
- filtered_.shutter = shutterTime;
- filtered_.analogueGain = analogueGain;
-}
-
-void Agc::writeAndFinish(Metadata *imageMetadata, bool desaturate)
-{
- status_.totalExposureValue = filtered_.totalExposure;
- status_.targetExposureValue = desaturate ? 0s : target_.totalExposureNoDG;
- status_.shutterTime = filtered_.shutter;
- status_.analogueGain = filtered_.analogueGain;
- /*
- * Write to metadata as well, in case anyone wants to update the camera
- * immediately.
- */
- imageMetadata->set("agc.status", status_);
- LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
- << filtered_.totalExposure;
- LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
- << " analogue gain " << filtered_.analogueGain;
-}
-
-Duration Agc::limitShutter(Duration shutter)
-{
- /*
- * shutter == 0 is a special case for fixed shutter values, and must pass
- * through unchanged
- */
- if (!shutter)
- return shutter;
-
- shutter = std::clamp(shutter, mode_.minShutter, maxShutter_);
- return shutter;
-}
-
-double Agc::limitGain(double gain) const
-{
- /*
- * Only limit the lower bounds of the gain value to what the sensor limits.
- * The upper bound on analogue gain will be made up with additional digital
- * gain applied by the ISP.
- *
- * gain == 0.0 is a special case for fixed shutter values, and must pass
- * through unchanged
- */
- if (!gain)
- return gain;
-
- gain = std::max(gain, mode_.minAnalogueGain);
- return gain;
+ /* And onto the next channel for the next call. */
+ index_ = (index_ + 1) % activeChannels_.size();
}
/* Register algorithm with the system. */
diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h
index 939f972..9089043 100644
--- a/src/ipa/rpi/controller/rpi/agc.h
+++ b/src/ipa/rpi/controller/rpi/agc.h
@@ -6,60 +6,20 @@
*/
#pragma once
+#include <optional>
+#include <string>
#include <vector>
-#include <mutex>
-
-#include <libcamera/base/utils.h>
#include "../agc_algorithm.h"
-#include "../agc_status.h"
-#include "../pwl.h"
-/* This is our implementation of AGC. */
+#include "agc_channel.h"
namespace RPiController {
-struct AgcMeteringMode {
- std::vector<double> weights;
- int read(const libcamera::YamlObject ¶ms);
-};
-
-struct AgcExposureMode {
- std::vector<libcamera::utils::Duration> shutter;
- std::vector<double> gain;
- int read(const libcamera::YamlObject ¶ms);
-};
-
-struct AgcConstraint {
- enum class Bound { LOWER = 0, UPPER = 1 };
- Bound bound;
- double qLo;
- double qHi;
- Pwl yTarget;
- int read(const libcamera::YamlObject ¶ms);
-};
-
-typedef std::vector<AgcConstraint> AgcConstraintMode;
-
-struct AgcConfig {
- int read(const libcamera::YamlObject ¶ms);
- std::map<std::string, AgcMeteringMode> meteringModes;
- std::map<std::string, AgcExposureMode> exposureModes;
- std::map<std::string, AgcConstraintMode> constraintModes;
- Pwl yTarget;
- double speed;
- uint16_t startupFrames;
- unsigned int convergenceFrames;
- double maxChange;
- double minChange;
- double fastReduceThreshold;
- double speedUpThreshold;
- std::string defaultMeteringMode;
- std::string defaultExposureMode;
- std::string defaultConstraintMode;
- double baseEv;
- libcamera::utils::Duration defaultExposureTime;
- double defaultAnalogueGain;
+struct AgcChannelData {
+ AgcChannel channel;
+ std::optional<DeviceStatus> deviceStatus;
+ StatisticsPtr statistics;
};
class Agc : public AgcAlgorithm
@@ -70,65 +30,32 @@
int read(const libcamera::YamlObject ¶ms) override;
unsigned int getConvergenceFrames() const override;
std::vector<double> const &getWeights() const override;
- void setEv(double ev) override;
- void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) override;
+ void setEv(unsigned int channel, double ev) override;
+ void setFlickerPeriod(unsigned int channelIndex,
+ libcamera::utils::Duration flickerPeriod) override;
void setMaxShutter(libcamera::utils::Duration maxShutter) override;
- void setFixedShutter(libcamera::utils::Duration fixedShutter) override;
- void setFixedAnalogueGain(double fixedAnalogueGain) override;
+ void setFixedShutter(unsigned int channelIndex,
+ libcamera::utils::Duration fixedShutter) override;
+ void setFixedAnalogueGain(unsigned int channelIndex,
+ double fixedAnalogueGain) override;
void setMeteringMode(std::string const &meteringModeName) override;
- void setExposureMode(std::string const &exposureModeName) override;
- void setConstraintMode(std::string const &contraintModeName) override;
- void enableAuto() override;
- void disableAuto() override;
+ void setExposureMode(unsigned int channelIndex,
+ std::string const &exposureModeName) override;
+ void setConstraintMode(unsigned int channelIndex,
+ std::string const &contraintModeName) override;
+ void enableAuto(unsigned int channelIndex) override;
+ void disableAuto(unsigned int channelIndex) override;
void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
void prepare(Metadata *imageMetadata) override;
void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
+ void setActiveChannels(const std::vector<unsigned int> &activeChannels) override;
private:
- void updateLockStatus(DeviceStatus const &deviceStatus);
- AgcConfig config_;
- void housekeepConfig();
- void fetchCurrentExposure(Metadata *imageMetadata);
- void fetchAwbStatus(Metadata *imageMetadata);
- void computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
- double &gain, double &targetY);
- void computeTargetExposure(double gain);
- bool applyDigitalGain(double gain, double targetY);
- void filterExposure(bool desaturate);
- void divideUpExposure();
- void writeAndFinish(Metadata *imageMetadata, bool desaturate);
- libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter);
- double limitGain(double gain) const;
- AgcMeteringMode *meteringMode_;
- AgcExposureMode *exposureMode_;
- AgcConstraintMode *constraintMode_;
- CameraMode mode_;
- uint64_t frameCount_;
- AwbStatus awb_;
- struct ExposureValues {
- ExposureValues();
-
- libcamera::utils::Duration shutter;
- double analogueGain;
- libcamera::utils::Duration totalExposure;
- libcamera::utils::Duration totalExposureNoDG; /* without digital gain */
- };
- ExposureValues current_; /* values for the current frame */
- ExposureValues target_; /* calculate the values we want here */
- ExposureValues filtered_; /* these values are filtered towards target */
- AgcStatus status_;
- int lockCount_;
- DeviceStatus lastDeviceStatus_;
- libcamera::utils::Duration lastTargetExposure_;
- /* Below here the "settings" that applications can change. */
- std::string meteringModeName_;
- std::string exposureModeName_;
- std::string constraintModeName_;
- double ev_;
- libcamera::utils::Duration flickerPeriod_;
- libcamera::utils::Duration maxShutter_;
- libcamera::utils::Duration fixedShutter_;
- double fixedAnalogueGain_;
+ int checkChannel(unsigned int channel) const;
+ std::vector<AgcChannelData> channelData_;
+ std::vector<unsigned int> activeChannels_;
+ unsigned int index_; /* index into the activeChannels_ */
+ AgcChannelTotalExposures channelTotalExposures_;
};
} /* namespace RPiController */
diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp
new file mode 100644
index 0000000..3957dbc
--- /dev/null
+++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp
@@ -0,0 +1,1010 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2023, Raspberry Pi Ltd
+ *
+ * agc_channel.cpp - AGC/AEC control algorithm
+ */
+
+#include "agc_channel.h"
+
+#include <algorithm>
+#include <tuple>
+
+#include <libcamera/base/log.h>
+
+#include "../awb_status.h"
+#include "../device_status.h"
+#include "../histogram.h"
+#include "../lux_status.h"
+#include "../metadata.h"
+
+using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+using namespace std::literals::chrono_literals;
+
+LOG_DECLARE_CATEGORY(RPiAgc)
+
+int AgcMeteringMode::read(const libcamera::YamlObject ¶ms)
+{
+ const YamlObject &yamlWeights = params["weights"];
+
+ for (const auto &p : yamlWeights.asList()) {
+ auto value = p.get<double>();
+ if (!value)
+ return -EINVAL;
+ weights.push_back(*value);
+ }
+
+ return 0;
+}
+
+static std::tuple<int, std::string>
+readMeteringModes(std::map<std::string, AgcMeteringMode> &metering_modes,
+ const libcamera::YamlObject ¶ms)
+{
+ std::string first;
+ int ret;
+
+ for (const auto &[key, value] : params.asDict()) {
+ AgcMeteringMode meteringMode;
+ ret = meteringMode.read(value);
+ if (ret)
+ return { ret, {} };
+
+ metering_modes[key] = std::move(meteringMode);
+ if (first.empty())
+ first = key;
+ }
+
+ return { 0, first };
+}
+
+int AgcExposureMode::read(const libcamera::YamlObject ¶ms)
+{
+ auto value = params["shutter"].getList<double>();
+ if (!value)
+ return -EINVAL;
+ std::transform(value->begin(), value->end(), std::back_inserter(shutter),
+ [](double v) { return v * 1us; });
+
+ value = params["gain"].getList<double>();
+ if (!value)
+ return -EINVAL;
+ gain = std::move(*value);
+
+ if (shutter.size() < 2 || gain.size() < 2) {
+ LOG(RPiAgc, Error)
+ << "AgcExposureMode: must have at least two entries in exposure profile";
+ return -EINVAL;
+ }
+
+ if (shutter.size() != gain.size()) {
+ LOG(RPiAgc, Error)
+ << "AgcExposureMode: expect same number of exposure and gain entries in exposure profile";
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+static std::tuple<int, std::string>
+readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
+ const libcamera::YamlObject ¶ms)
+{
+ std::string first;
+ int ret;
+
+ for (const auto &[key, value] : params.asDict()) {
+ AgcExposureMode exposureMode;
+ ret = exposureMode.read(value);
+ if (ret)
+ return { ret, {} };
+
+ exposureModes[key] = std::move(exposureMode);
+ if (first.empty())
+ first = key;
+ }
+
+ return { 0, first };
+}
+
+int AgcConstraint::read(const libcamera::YamlObject ¶ms)
+{
+ std::string boundString = params["bound"].get<std::string>("");
+ transform(boundString.begin(), boundString.end(),
+ boundString.begin(), ::toupper);
+ if (boundString != "UPPER" && boundString != "LOWER") {
+ LOG(RPiAgc, Error) << "AGC constraint type should be UPPER or LOWER";
+ return -EINVAL;
+ }
+ bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
+
+ auto value = params["q_lo"].get<double>();
+ if (!value)
+ return -EINVAL;
+ qLo = *value;
+
+ value = params["q_hi"].get<double>();
+ if (!value)
+ return -EINVAL;
+ qHi = *value;
+
+ return yTarget.read(params["y_target"]);
+}
+
+static std::tuple<int, AgcConstraintMode>
+readConstraintMode(const libcamera::YamlObject ¶ms)
+{
+ AgcConstraintMode mode;
+ int ret;
+
+ for (const auto &p : params.asList()) {
+ AgcConstraint constraint;
+ ret = constraint.read(p);
+ if (ret)
+ return { ret, {} };
+
+ mode.push_back(std::move(constraint));
+ }
+
+ return { 0, mode };
+}
+
+static std::tuple<int, std::string>
+readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
+ const libcamera::YamlObject ¶ms)
+{
+ std::string first;
+ int ret;
+
+ for (const auto &[key, value] : params.asDict()) {
+ std::tie(ret, constraintModes[key]) = readConstraintMode(value);
+ if (ret)
+ return { ret, {} };
+
+ if (first.empty())
+ first = key;
+ }
+
+ return { 0, first };
+}
+
+int AgcChannelConstraint::read(const libcamera::YamlObject ¶ms)
+{
+ auto channelValue = params["channel"].get<unsigned int>();
+ if (!channelValue) {
+ LOG(RPiAgc, Error) << "AGC channel constraint must have a channel";
+ return -EINVAL;
+ }
+ channel = *channelValue;
+
+ std::string boundString = params["bound"].get<std::string>("");
+ transform(boundString.begin(), boundString.end(),
+ boundString.begin(), ::toupper);
+ if (boundString != "UPPER" && boundString != "LOWER") {
+ LOG(RPiAgc, Error) << "AGC channel constraint type should be UPPER or LOWER";
+ return -EINVAL;
+ }
+ bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
+
+ auto factorValue = params["factor"].get<double>();
+ if (!factorValue) {
+ LOG(RPiAgc, Error) << "AGC channel constraint must have a factor";
+ return -EINVAL;
+ }
+ factor = *factorValue;
+
+ return 0;
+}
+
+static int readChannelConstraints(std::vector<AgcChannelConstraint> &channelConstraints,
+ const libcamera::YamlObject ¶ms)
+{
+ for (const auto &p : params.asList()) {
+ AgcChannelConstraint constraint;
+ int ret = constraint.read(p);
+ if (ret)
+ return ret;
+
+ channelConstraints.push_back(constraint);
+ }
+
+ return 0;
+}
+
+int AgcConfig::read(const libcamera::YamlObject ¶ms)
+{
+ LOG(RPiAgc, Debug) << "AgcConfig";
+ int ret;
+
+ std::tie(ret, defaultMeteringMode) =
+ readMeteringModes(meteringModes, params["metering_modes"]);
+ if (ret)
+ return ret;
+ std::tie(ret, defaultExposureMode) =
+ readExposureModes(exposureModes, params["exposure_modes"]);
+ if (ret)
+ return ret;
+ std::tie(ret, defaultConstraintMode) =
+ readConstraintModes(constraintModes, params["constraint_modes"]);
+ if (ret)
+ return ret;
+
+ if (params.contains("channel_constraints")) {
+ ret = readChannelConstraints(channelConstraints, params["channel_constraints"]);
+ if (ret)
+ return ret;
+ }
+
+ ret = yTarget.read(params["y_target"]);
+ if (ret)
+ return ret;
+
+ speed = params["speed"].get<double>(0.2);
+ startupFrames = params["startup_frames"].get<uint16_t>(10);
+ convergenceFrames = params["convergence_frames"].get<unsigned int>(6);
+ fastReduceThreshold = params["fast_reduce_threshold"].get<double>(0.4);
+ baseEv = params["base_ev"].get<double>(1.0);
+
+ /* Start with quite a low value as ramping up is easier than ramping down. */
+ defaultExposureTime = params["default_exposure_time"].get<double>(1000) * 1us;
+ defaultAnalogueGain = params["default_analogue_gain"].get<double>(1.0);
+
+ return 0;
+}
+
+AgcChannel::ExposureValues::ExposureValues()
+ : shutter(0s), analogueGain(0),
+ totalExposure(0s), totalExposureNoDG(0s)
+{
+}
+
+AgcChannel::AgcChannel()
+ : meteringMode_(nullptr), exposureMode_(nullptr), constraintMode_(nullptr),
+ frameCount_(0), lockCount_(0),
+ lastTargetExposure_(0s), ev_(1.0), flickerPeriod_(0s),
+ maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
+{
+ memset(&awb_, 0, sizeof(awb_));
+ /*
+ * Setting status_.totalExposureValue_ to zero initially tells us
+ * it's not been calculated yet (i.e. Process hasn't yet run).
+ */
+ status_ = {};
+ status_.ev = ev_;
+}
+
+int AgcChannel::read(const libcamera::YamlObject &params,
+ const Controller::HardwareConfig &hardwareConfig)
+{
+ int ret = config_.read(params);
+ if (ret)
+ return ret;
+
+ const Size &size = hardwareConfig.agcZoneWeights;
+ for (auto const &modes : config_.meteringModes) {
+ if (modes.second.weights.size() != size.width * size.height) {
+ LOG(RPiAgc, Error) << "AgcMeteringMode: Incorrect number of weights";
+ return -EINVAL;
+ }
+ }
+
+ /*
+ * Set the config's defaults (which are the first ones it read) as our
+ * current modes, until someone changes them. (they're all known to
+ * exist at this point)
+ */
+ meteringModeName_ = config_.defaultMeteringMode;
+ meteringMode_ = &config_.meteringModes[meteringModeName_];
+ exposureModeName_ = config_.defaultExposureMode;
+ exposureMode_ = &config_.exposureModes[exposureModeName_];
+ constraintModeName_ = config_.defaultConstraintMode;
+ constraintMode_ = &config_.constraintModes[constraintModeName_];
+ /* Set up the "last shutter/gain" values, in case AGC starts "disabled". */
+ status_.shutterTime = config_.defaultExposureTime;
+ status_.analogueGain = config_.defaultAnalogueGain;
+ return 0;
+}
+
+void AgcChannel::disableAuto()
+{
+ fixedShutter_ = status_.shutterTime;
+ fixedAnalogueGain_ = status_.analogueGain;
+}
+
+void AgcChannel::enableAuto()
+{
+ fixedShutter_ = 0s;
+ fixedAnalogueGain_ = 0;
+}
+
+unsigned int AgcChannel::getConvergenceFrames() const
+{
+ /*
+ * If shutter and gain have been explicitly set, there is no
+ * convergence to happen, so no need to drop any frames - return zero.
+ */
+ if (fixedShutter_ && fixedAnalogueGain_)
+ return 0;
+ else
+ return config_.convergenceFrames;
+}
+
+std::vector<double> const &AgcChannel::getWeights() const
+{
+ /*
+ * In case someone calls setMeteringMode and then this before the
+ * algorithm has run and updated the meteringMode_ pointer.
+ */
+ auto it = config_.meteringModes.find(meteringModeName_);
+ if (it == config_.meteringModes.end())
+ return meteringMode_->weights;
+ return it->second.weights;
+}
+
+void AgcChannel::setEv(double ev)
+{
+ ev_ = ev;
+}
+
+void AgcChannel::setFlickerPeriod(Duration flickerPeriod)
+{
+ flickerPeriod_ = flickerPeriod;
+}
+
+void AgcChannel::setMaxShutter(Duration maxShutter)
+{
+ maxShutter_ = maxShutter;
+}
+
+void AgcChannel::setFixedShutter(Duration fixedShutter)
+{
+ fixedShutter_ = fixedShutter;
+ /* Set this in case someone calls disableAuto() straight after. */
+ status_.shutterTime = limitShutter(fixedShutter_);
+}
+
+void AgcChannel::setFixedAnalogueGain(double fixedAnalogueGain)
+{
+ fixedAnalogueGain_ = fixedAnalogueGain;
+ /* Set this in case someone calls disableAuto() straight after. */
+ status_.analogueGain = limitGain(fixedAnalogueGain);
+}
+
+void AgcChannel::setMeteringMode(std::string const &meteringModeName)
+{
+ meteringModeName_ = meteringModeName;
+}
+
+void AgcChannel::setExposureMode(std::string const &exposureModeName)
+{
+ exposureModeName_ = exposureModeName;
+}
+
+void AgcChannel::setConstraintMode(std::string const &constraintModeName)
+{
+ constraintModeName_ = constraintModeName;
+}
+
+void AgcChannel::switchMode(CameraMode const &cameraMode,
+ Metadata *metadata)
+{
+ /* AGC expects the mode sensitivity always to be non-zero. */
+ ASSERT(cameraMode.sensitivity);
+
+ housekeepConfig();
+
+ /*
+ * Store the mode in the local state. We must cache the sensitivity of
+ * the previous mode for the calculations below.
+ */
+ double lastSensitivity = mode_.sensitivity;
+ mode_ = cameraMode;
+
+ Duration fixedShutter = limitShutter(fixedShutter_);
+ if (fixedShutter && fixedAnalogueGain_) {
+ /* We're going to reset the algorithm here with these fixed values. */
+
+ fetchAwbStatus(metadata);
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
+
+ /* This is the equivalent of computeTargetExposure and applyDigitalGain. */
+ target_.totalExposureNoDG = fixedShutter_ * fixedAnalogueGain_;
+ target_.totalExposure = target_.totalExposureNoDG / minColourGain;
+
+ /* Equivalent of filterExposure. This resets any "history". */
+ filtered_ = target_;
+
+ /* Equivalent of divideUpExposure. */
+ filtered_.shutter = fixedShutter;
+ filtered_.analogueGain = fixedAnalogueGain_;
+ } else if (status_.totalExposureValue) {
+ /*
+ * On a mode switch, various things could happen:
+ * - the exposure profile might change
+ * - a fixed exposure or gain might be set
+ * - the new mode's sensitivity might be different
+ * We cope with the last of these by scaling the target values. After
+ * that we just need to re-divide the exposure/gain according to the
+ * current exposure profile, which takes care of everything else.
+ */
+
+ double ratio = lastSensitivity / cameraMode.sensitivity;
+ target_.totalExposureNoDG *= ratio;
+ target_.totalExposure *= ratio;
+ filtered_.totalExposureNoDG *= ratio;
+ filtered_.totalExposure *= ratio;
+
+ divideUpExposure();
+ } else {
+ /*
+ * We come through here on startup, when at least one of the shutter
+ * or gain has not been fixed. We must still write those values out so
+ * that they will be applied immediately. We supply some arbitrary defaults
+ * for any that weren't set.
+ */
+
+ /* Equivalent of divideUpExposure. */
+ filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
+ filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
+ }
+
+ writeAndFinish(metadata, false);
+}
+
+void AgcChannel::prepare(Metadata *imageMetadata)
+{
+ Duration totalExposureValue = status_.totalExposureValue;
+ AgcStatus delayedStatus;
+ AgcPrepareStatus prepareStatus;
+
+ if (!imageMetadata->get("agc.delayed_status", delayedStatus))
+ totalExposureValue = delayedStatus.totalExposureValue;
+
+ prepareStatus.digitalGain = 1.0;
+ prepareStatus.locked = false;
+
+ if (status_.totalExposureValue) {
+ /* Process has run, so we have meaningful values. */
+ DeviceStatus deviceStatus;
+ if (imageMetadata->get("device.status", deviceStatus) == 0) {
+ Duration actualExposure = deviceStatus.shutterSpeed *
+ deviceStatus.analogueGain;
+ if (actualExposure) {
+ double digitalGain = totalExposureValue / actualExposure;
+ LOG(RPiAgc, Debug) << "Want total exposure " << totalExposureValue;
+ /*
+ * Never ask for a gain < 1.0, and also impose
+ * some upper limit. Make it customisable?
+ */
+ prepareStatus.digitalGain = std::max(1.0, std::min(digitalGain, 4.0));
+ LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
+ LOG(RPiAgc, Debug) << "Use digitalGain " << prepareStatus.digitalGain;
+ LOG(RPiAgc, Debug) << "Effective exposure "
+ << actualExposure * prepareStatus.digitalGain;
+ /* Decide whether AEC/AGC has converged. */
+ prepareStatus.locked = updateLockStatus(deviceStatus);
+ }
+ } else
+ LOG(RPiAgc, Warning) << "AgcChannel: no device metadata";
+ imageMetadata->set("agc.prepare_status", prepareStatus);
+ }
+}
+
+void AgcChannel::process(StatisticsPtr &stats, DeviceStatus const &deviceStatus,
+ Metadata *imageMetadata,
+ const AgcChannelTotalExposures &channelTotalExposures)
+{
+ frameCount_++;
+ /*
+ * First a little bit of housekeeping, fetching up-to-date settings and
+ * configuration, that kind of thing.
+ */
+ housekeepConfig();
+ /* Fetch the AWB status immediately, so that we can assume it's there. */
+ fetchAwbStatus(imageMetadata);
+ /* Get the current exposure values for the frame that's just arrived. */
+ fetchCurrentExposure(deviceStatus);
+ /* Compute the total gain we require relative to the current exposure. */
+ double gain, targetY;
+ computeGain(stats, imageMetadata, gain, targetY);
+ /* Now compute the target (final) exposure which we think we want. */
+ computeTargetExposure(gain);
+ /* The results have to be filtered so as not to change too rapidly. */
+ filterExposure();
+ /*
+ * We may be asked to limit the exposure using other channels. If another channel
+ * determines our upper bound we may want to know this later.
+ */
+ bool channelBound = applyChannelConstraints(channelTotalExposures);
+ /*
+ * Some of the exposure has to be applied as digital gain, so work out
+ * what that is. It also tells us whether it's trying to desaturate the image
+ * more quickly, which can only happen when another channel is not limiting us.
+ */
+ bool desaturate = applyDigitalGain(gain, targetY, channelBound);
+ /*
+ * The last thing is to divide up the exposure value into a shutter time
+ * and analogue gain, according to the current exposure mode.
+ */
+ divideUpExposure();
+ /* Finally advertise what we've done. */
+ writeAndFinish(imageMetadata, desaturate);
+}
+
+bool AgcChannel::updateLockStatus(DeviceStatus const &deviceStatus)
+{
+ const double errorFactor = 0.10; /* make these customisable? */
+ const int maxLockCount = 5;
+ /* Reset "lock count" when we exceed this multiple of errorFactor */
+ const double resetMargin = 1.5;
+
+ /* Add 200us to the exposure time error to allow for line quantisation. */
+ Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
+ double gainError = lastDeviceStatus_.analogueGain * errorFactor;
+ Duration targetError = lastTargetExposure_ * errorFactor;
+
+ /*
+ * Note that we don't know the exposure/gain limits of the sensor, so
+ * the values we keep requesting may be unachievable. For this reason
+ * we only insist that we're close to values in the past few frames.
+ */
+ if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
+ deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
+ deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
+ deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
+ status_.targetExposureValue > lastTargetExposure_ - targetError &&
+ status_.targetExposureValue < lastTargetExposure_ + targetError)
+ lockCount_ = std::min(lockCount_ + 1, maxLockCount);
+ else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
+ deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
+ deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
+ deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
+ status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
+ status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
+ lockCount_ = 0;
+
+ lastDeviceStatus_ = deviceStatus;
+ lastTargetExposure_ = status_.targetExposureValue;
+
+ LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
+ return lockCount_ == maxLockCount;
+}
+
+void AgcChannel::housekeepConfig()
+{
+ /* First fetch all the up-to-date settings, so no one else has to do it. */
+ status_.ev = ev_;
+ status_.fixedShutter = limitShutter(fixedShutter_);
+ status_.fixedAnalogueGain = fixedAnalogueGain_;
+ status_.flickerPeriod = flickerPeriod_;
+ LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
+ << status_.fixedShutter << " fixedAnalogueGain "
+ << status_.fixedAnalogueGain;
+ /*
+ * Make sure the "mode" pointers point to the up-to-date things, if
+ * they've changed.
+ */
+ if (meteringModeName_ != status_.meteringMode) {
+ auto it = config_.meteringModes.find(meteringModeName_);
+ if (it == config_.meteringModes.end()) {
+ LOG(RPiAgc, Warning) << "No metering mode " << meteringModeName_;
+ meteringModeName_ = status_.meteringMode;
+ } else {
+ meteringMode_ = &it->second;
+ status_.meteringMode = meteringModeName_;
+ }
+ }
+ if (exposureModeName_ != status_.exposureMode) {
+ auto it = config_.exposureModes.find(exposureModeName_);
+ if (it == config_.exposureModes.end()) {
+ LOG(RPiAgc, Warning) << "No exposure profile " << exposureModeName_;
+ exposureModeName_ = status_.exposureMode;
+ } else {
+ exposureMode_ = &it->second;
+ status_.exposureMode = exposureModeName_;
+ }
+ }
+ if (constraintModeName_ != status_.constraintMode) {
+ auto it = config_.constraintModes.find(constraintModeName_);
+ if (it == config_.constraintModes.end()) {
+ LOG(RPiAgc, Warning) << "No constraint list " << constraintModeName_;
+ constraintModeName_ = status_.constraintMode;
+ } else {
+ constraintMode_ = &it->second;
+ status_.constraintMode = constraintModeName_;
+ }
+ }
+ LOG(RPiAgc, Debug) << "exposureMode "
+ << exposureModeName_ << " constraintMode "
+ << constraintModeName_ << " meteringMode "
+ << meteringModeName_;
+}
+
+void AgcChannel::fetchCurrentExposure(DeviceStatus const &deviceStatus)
+{
+ current_.shutter = deviceStatus.shutterSpeed;
+ current_.analogueGain = deviceStatus.analogueGain;
+ current_.totalExposure = 0s; /* this value is unused */
+ current_.totalExposureNoDG = current_.shutter * current_.analogueGain;
+}
+
+void AgcChannel::fetchAwbStatus(Metadata *imageMetadata)
+{
+ awb_.gainR = 1.0; /* in case not found in metadata */
+ awb_.gainG = 1.0;
+ awb_.gainB = 1.0;
+ if (imageMetadata->get("awb.status", awb_) != 0)
+ LOG(RPiAgc, Debug) << "No AWB status found";
+}
+
+static double computeInitialY(StatisticsPtr &stats, AwbStatus const &awb,
+ std::vector<double> &weights, double gain)
+{
+ constexpr uint64_t maxVal = 1 << Statistics::NormalisationFactorPow2;
+
+ /*
+ * If we have no AGC region stats, but do have a Y histogram, use that
+ * directly to calculate the mean Y value of the image.
+ */
+ if (!stats->agcRegions.numRegions() && stats->yHist.bins()) {
+ /*
+ * When the gain is applied to the histogram, anything below minBin
+ * will scale up directly with the gain, but anything above that
+ * will saturate into the top bin.
+ */
+ auto &hist = stats->yHist;
+ double minBin = std::min(1.0, 1.0 / gain) * hist.bins();
+ double binMean = hist.interBinMean(0.0, minBin);
+ double numUnsaturated = hist.cumulativeFreq(minBin);
+ /* This term is from all the pixels that won't saturate. */
+ double ySum = binMean * gain * numUnsaturated;
+ /* And add the ones that will saturate. */
+ ySum += (hist.total() - numUnsaturated) * hist.bins();
+ return ySum / hist.total() / hist.bins();
+ }
+
+ ASSERT(weights.size() == stats->agcRegions.numRegions());
+
+ /*
+ * Note that the weights are applied by the IPA to the statistics directly,
+ * before they are given to us here.
+ */
+ double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
+ for (unsigned int i = 0; i < stats->agcRegions.numRegions(); i++) {
+ auto &region = stats->agcRegions.get(i);
+ rSum += std::min<double>(region.val.rSum * gain, (maxVal - 1) * region.counted);
+ gSum += std::min<double>(region.val.gSum * gain, (maxVal - 1) * region.counted);
+ bSum += std::min<double>(region.val.bSum * gain, (maxVal - 1) * region.counted);
+ pixelSum += region.counted;
+ }
+ if (pixelSum == 0.0) {
+ LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
+ return 0;
+ }
+
+ double ySum;
+ /* Factor in the AWB correction if needed. */
+ if (stats->agcStatsPos == Statistics::AgcStatsPos::PreWb) {
+ ySum = rSum * awb.gainR * .299 +
+ gSum * awb.gainG * .587 +
+ bSum * awb.gainB * .114;
+ } else
+ ySum = rSum * .299 + gSum * .587 + bSum * .114;
+
+ return ySum / pixelSum / (1 << 16);
+}
+
+/*
+ * We handle extra gain through EV by adjusting our Y targets. However, you
+ * simply can't monitor histograms once they get very close to (or beyond!)
+ * saturation, so we clamp the Y targets to this value. It does mean that EV
+ * increases don't necessarily do quite what you might expect in certain
+ * (contrived) cases.
+ */
+
+static constexpr double EvGainYTargetLimit = 0.9;
+
+static double constraintComputeGain(AgcConstraint &c, const Histogram &h, double lux,
+ double evGain, double &targetY)
+{
+ targetY = c.yTarget.eval(c.yTarget.domain().clip(lux));
+ targetY = std::min(EvGainYTargetLimit, targetY * evGain);
+ double iqm = h.interQuantileMean(c.qLo, c.qHi);
+ return (targetY * h.bins()) / iqm;
+}
+
+void AgcChannel::computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
+ double &gain, double &targetY)
+{
+ struct LuxStatus lux = {};
+ lux.lux = 400; /* default lux level to 400 in case no metadata found */
+ if (imageMetadata->get("lux.status", lux) != 0)
+ LOG(RPiAgc, Warning) << "No lux level found";
+ const Histogram &h = statistics->yHist;
+ double evGain = status_.ev * config_.baseEv;
+ /*
+ * The initial gain and target_Y come from some of the regions. After
+ * that we consider the histogram constraints.
+ */
+ targetY = config_.yTarget.eval(config_.yTarget.domain().clip(lux.lux));
+ targetY = std::min(EvGainYTargetLimit, targetY * evGain);
+
+ /*
+ * Do this calculation a few times as brightness increase can be
+ * non-linear when there are saturated regions.
+ */
+ gain = 1.0;
+ for (int i = 0; i < 8; i++) {
+ double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
+ double extraGain = std::min(10.0, targetY / (initialY + .001));
+ gain *= extraGain;
+ LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
+ << " gives gain " << gain;
+ if (extraGain < 1.01) /* close enough */
+ break;
+ }
+
+ for (auto &c : *constraintMode_) {
+ double newTargetY;
+ double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
+ LOG(RPiAgc, Debug) << "Constraint has target_Y "
+ << newTargetY << " giving gain " << newGain;
+ if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
+ LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
+ gain = newGain;
+ targetY = newTargetY;
+ } else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
+ LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
+ gain = newGain;
+ targetY = newTargetY;
+ }
+ }
+ LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
+ << status_.ev << " base_ev " << config_.baseEv
+ << ")";
+}
+
+void AgcChannel::computeTargetExposure(double gain)
+{
+ if (status_.fixedShutter && status_.fixedAnalogueGain) {
+ /*
+ * When ag and shutter are both fixed, we need to drive the
+ * total exposure so that we end up with a digital gain of at least
+ * 1/minColourGain. Otherwise we'd desaturate channels causing
+ * white to go cyan or magenta.
+ */
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
+ target_.totalExposure =
+ status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
+ } else {
+ /*
+ * The statistics reflect the image without digital gain, so the final
+ * total exposure we're aiming for is:
+ */
+ target_.totalExposure = current_.totalExposureNoDG * gain;
+ /* The final target exposure is also limited to what the exposure mode allows. */
+ Duration maxShutter = status_.fixedShutter
+ ? status_.fixedShutter
+ : exposureMode_->shutter.back();
+ maxShutter = limitShutter(maxShutter);
+ Duration maxTotalExposure =
+ maxShutter *
+ (status_.fixedAnalogueGain != 0.0
+ ? status_.fixedAnalogueGain
+ : exposureMode_->gain.back());
+ target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
+ }
+ LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
+}
+
+bool AgcChannel::applyChannelConstraints(const AgcChannelTotalExposures &channelTotalExposures)
+{
+ bool channelBound = false;
+ LOG(RPiAgc, Debug)
+ << "Total exposure before channel constraints " << filtered_.totalExposure;
+
+ for (const auto &constraint : config_.channelConstraints) {
+ LOG(RPiAgc, Debug)
+ << "Check constraint: channel " << constraint.channel << " bound "
+ << (constraint.bound == AgcChannelConstraint::Bound::UPPER ? "UPPER" : "LOWER")
+ << " factor " << constraint.factor;
+ if (constraint.channel >= channelTotalExposures.size() ||
+ !channelTotalExposures[constraint.channel]) {
+ LOG(RPiAgc, Debug) << "no such channel or no exposure available - skipped";
+ continue;
+ }
+
+ libcamera::utils::Duration limitExposure =
+ channelTotalExposures[constraint.channel] * constraint.factor;
+ LOG(RPiAgc, Debug) << "Limit exposure " << limitExposure;
+ if ((constraint.bound == AgcChannelConstraint::Bound::UPPER &&
+ filtered_.totalExposure > limitExposure) ||
+ (constraint.bound == AgcChannelConstraint::Bound::LOWER &&
+ filtered_.totalExposure < limitExposure)) {
+ filtered_.totalExposure = limitExposure;
+ LOG(RPiAgc, Debug) << "Constraint applies";
+ channelBound = true;
+ } else
+ LOG(RPiAgc, Debug) << "Constraint does not apply";
+ }
+
+ LOG(RPiAgc, Debug)
+ << "Total exposure after channel constraints " << filtered_.totalExposure;
+
+ return channelBound;
+}
+
+bool AgcChannel::applyDigitalGain(double gain, double targetY, bool channelBound)
+{
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
+ double dg = 1.0 / minColourGain;
+ /*
+ * I think this pipeline subtracts black level and rescales before we
+ * get the stats, so no need to worry about it.
+ */
+ LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
+ << " target_Y " << targetY;
+ /*
+ * Finally, if we're trying to reduce exposure but the target_Y is
+ * "close" to 1.0, then the gain computed for that constraint will be
+ * only slightly less than one, because the measured Y can never be
+ * larger than 1.0. When this happens, demand a large digital gain so
+ * that the exposure can be reduced, de-saturating the image much more
+ * quickly (and we then approach the correct value more quickly from
+ * below).
+ */
+ bool desaturate = !channelBound &&
+ targetY > config_.fastReduceThreshold && gain < sqrt(targetY);
+ if (desaturate)
+ dg /= config_.fastReduceThreshold;
+ LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
+ filtered_.totalExposureNoDG = filtered_.totalExposure / dg;
+ LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << filtered_.totalExposureNoDG;
+ return desaturate;
+}
+
+void AgcChannel::filterExposure()
+{
+ double speed = config_.speed;
+ /*
+ * AGC adapts instantly if both shutter and gain are directly specified
+ * or we're in the startup phase.
+ */
+ if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
+ frameCount_ <= config_.startupFrames)
+ speed = 1.0;
+ if (!filtered_.totalExposure) {
+ filtered_.totalExposure = target_.totalExposure;
+ } else {
+ /*
+ * If close to the result go faster, to save making so many
+ * micro-adjustments on the way. (Make this customisable?)
+ */
+ if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
+ filtered_.totalExposure > 0.8 * target_.totalExposure)
+ speed = sqrt(speed);
+ filtered_.totalExposure = speed * target_.totalExposure +
+ filtered_.totalExposure * (1.0 - speed);
+ }
+ LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
+ << " no dg " << filtered_.totalExposureNoDG;
+}
+
+void AgcChannel::divideUpExposure()
+{
+ /*
+ * Sending the fixed shutter/gain cases through the same code may seem
+ * unnecessary, but it will make more sense when we extend this to cover
+ * variable aperture.
+ */
+ Duration exposureValue = filtered_.totalExposureNoDG;
+ Duration shutterTime;
+ double analogueGain;
+ shutterTime = status_.fixedShutter ? status_.fixedShutter
+ : exposureMode_->shutter[0];
+ shutterTime = limitShutter(shutterTime);
+ analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
+ : exposureMode_->gain[0];
+ analogueGain = limitGain(analogueGain);
+ if (shutterTime * analogueGain < exposureValue) {
+ for (unsigned int stage = 1;
+ stage < exposureMode_->gain.size(); stage++) {
+ if (!status_.fixedShutter) {
+ Duration stageShutter =
+ limitShutter(exposureMode_->shutter[stage]);
+ if (stageShutter * analogueGain >= exposureValue) {
+ shutterTime = exposureValue / analogueGain;
+ break;
+ }
+ shutterTime = stageShutter;
+ }
+ if (status_.fixedAnalogueGain == 0.0) {
+ if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
+ analogueGain = exposureValue / shutterTime;
+ break;
+ }
+ analogueGain = exposureMode_->gain[stage];
+ analogueGain = limitGain(analogueGain);
+ }
+ }
+ }
+ LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
+ << analogueGain;
+ /*
+ * Finally adjust shutter time for flicker avoidance (require both
+ * shutter and gain not to be fixed).
+ */
+ if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
+ status_.flickerPeriod) {
+ int flickerPeriods = shutterTime / status_.flickerPeriod;
+ if (flickerPeriods) {
+ Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
+ analogueGain *= shutterTime / newShutterTime;
+ /*
+ * We should still not allow the ag to go over the
+ * largest value in the exposure mode. Note that this
+ * may force more of the total exposure into the digital
+ * gain as a side-effect.
+ */
+ analogueGain = std::min(analogueGain, exposureMode_->gain.back());
+ analogueGain = limitGain(analogueGain);
+ shutterTime = newShutterTime;
+ }
+ LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
+ << shutterTime << " gain " << analogueGain;
+ }
+ filtered_.shutter = shutterTime;
+ filtered_.analogueGain = analogueGain;
+}
+
+void AgcChannel::writeAndFinish(Metadata *imageMetadata, bool desaturate)
+{
+ status_.totalExposureValue = filtered_.totalExposure;
+ status_.targetExposureValue = desaturate ? 0s : target_.totalExposureNoDG;
+ status_.shutterTime = filtered_.shutter;
+ status_.analogueGain = filtered_.analogueGain;
+ /*
+ * Write to metadata as well, in case anyone wants to update the camera
+ * immediately.
+ */
+ imageMetadata->set("agc.status", status_);
+ LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
+ << filtered_.totalExposure;
+ LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
+ << " analogue gain " << filtered_.analogueGain;
+}
+
+Duration AgcChannel::limitShutter(Duration shutter)
+{
+ /*
+ * shutter == 0 is a special case for fixed shutter values, and must pass
+ * through unchanged
+ */
+ if (!shutter)
+ return shutter;
+
+ shutter = std::clamp(shutter, mode_.minShutter, maxShutter_);
+ return shutter;
+}
+
+double AgcChannel::limitGain(double gain) const
+{
+ /*
+ * Only limit the lower bounds of the gain value to what the sensor limits.
+ * The upper bound on analogue gain will be made up with additional digital
+ * gain applied by the ISP.
+ *
+ * gain == 0.0 is a special case for fixed shutter values, and must pass
+ * through unchanged
+ */
+ if (!gain)
+ return gain;
+
+ gain = std::max(gain, mode_.minAnalogueGain);
+ return gain;
+}
diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h
new file mode 100644
index 0000000..ae826fa
--- /dev/null
+++ b/src/ipa/rpi/controller/rpi/agc_channel.h
@@ -0,0 +1,151 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2023, Raspberry Pi Ltd
+ *
+ * agc_channel.h - AGC/AEC control algorithm
+ */
+#pragma once
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include <libcamera/base/utils.h>
+
+#include "../agc_status.h"
+#include "../awb_status.h"
+#include "../controller.h"
+#include "../pwl.h"
+
+/* This is our implementation of AGC. */
+
+namespace RPiController {
+
+using AgcChannelTotalExposures = std::vector<libcamera::utils::Duration>;
+
+struct AgcMeteringMode {
+ std::vector<double> weights;
+ int read(const libcamera::YamlObject &params);
+};
+
+struct AgcExposureMode {
+ std::vector<libcamera::utils::Duration> shutter;
+ std::vector<double> gain;
+ int read(const libcamera::YamlObject &params);
+};
+
+struct AgcConstraint {
+ enum class Bound { LOWER = 0,
+ UPPER = 1 };
+ Bound bound;
+ double qLo;
+ double qHi;
+ Pwl yTarget;
+ int read(const libcamera::YamlObject &params);
+};
+
+typedef std::vector<AgcConstraint> AgcConstraintMode;
+
+struct AgcChannelConstraint {
+ enum class Bound { LOWER = 0,
+ UPPER = 1 };
+ Bound bound;
+ unsigned int channel;
+ double factor;
+ int read(const libcamera::YamlObject &params);
+};
+
+struct AgcConfig {
+ int read(const libcamera::YamlObject &params);
+ std::map<std::string, AgcMeteringMode> meteringModes;
+ std::map<std::string, AgcExposureMode> exposureModes;
+ std::map<std::string, AgcConstraintMode> constraintModes;
+ std::vector<AgcChannelConstraint> channelConstraints;
+ Pwl yTarget;
+ double speed;
+ uint16_t startupFrames;
+ unsigned int convergenceFrames;
+ double maxChange;
+ double minChange;
+ double fastReduceThreshold;
+ double speedUpThreshold;
+ std::string defaultMeteringMode;
+ std::string defaultExposureMode;
+ std::string defaultConstraintMode;
+ double baseEv;
+ libcamera::utils::Duration defaultExposureTime;
+ double defaultAnalogueGain;
+};
+
+class AgcChannel
+{
+public:
+ AgcChannel();
+ int read(const libcamera::YamlObject &params,
+ const Controller::HardwareConfig &hardwareConfig);
+ unsigned int getConvergenceFrames() const;
+ std::vector<double> const &getWeights() const;
+ void setEv(double ev);
+ void setFlickerPeriod(libcamera::utils::Duration flickerPeriod);
+ void setMaxShutter(libcamera::utils::Duration maxShutter);
+ void setFixedShutter(libcamera::utils::Duration fixedShutter);
+ void setFixedAnalogueGain(double fixedAnalogueGain);
+ void setMeteringMode(std::string const &meteringModeName);
+ void setExposureMode(std::string const &exposureModeName);
+ void setConstraintMode(std::string const &constraintModeName);
+ void enableAuto();
+ void disableAuto();
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata);
+ void prepare(Metadata *imageMetadata);
+ void process(StatisticsPtr &stats, DeviceStatus const &deviceStatus, Metadata *imageMetadata,
+ const AgcChannelTotalExposures &channelTotalExposures);
+
+private:
+ bool updateLockStatus(DeviceStatus const &deviceStatus);
+ AgcConfig config_;
+ void housekeepConfig();
+ void fetchCurrentExposure(DeviceStatus const &deviceStatus);
+ void fetchAwbStatus(Metadata *imageMetadata);
+ void computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
+ double &gain, double &targetY);
+ void computeTargetExposure(double gain);
+ void filterExposure();
+ bool applyChannelConstraints(const AgcChannelTotalExposures &channelTotalExposures);
+ bool applyDigitalGain(double gain, double targetY, bool channelBound);
+ void divideUpExposure();
+ void writeAndFinish(Metadata *imageMetadata, bool desaturate);
+ libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter);
+ double limitGain(double gain) const;
+ AgcMeteringMode *meteringMode_;
+ AgcExposureMode *exposureMode_;
+ AgcConstraintMode *constraintMode_;
+ CameraMode mode_;
+ uint64_t frameCount_;
+ AwbStatus awb_;
+ struct ExposureValues {
+ ExposureValues();
+
+ libcamera::utils::Duration shutter;
+ double analogueGain;
+ libcamera::utils::Duration totalExposure;
+ libcamera::utils::Duration totalExposureNoDG; /* without digital gain */
+ };
+ ExposureValues current_; /* values for the current frame */
+ ExposureValues target_; /* calculate the values we want here */
+ ExposureValues filtered_; /* these values are filtered towards target */
+ AgcStatus status_;
+ int lockCount_;
+ DeviceStatus lastDeviceStatus_;
+ libcamera::utils::Duration lastTargetExposure_;
+ /* Below here the "settings" that applications can change. */
+ std::string meteringModeName_;
+ std::string exposureModeName_;
+ std::string constraintModeName_;
+ double ev_;
+ libcamera::utils::Duration flickerPeriod_;
+ libcamera::utils::Duration maxShutter_;
+ libcamera::utils::Duration fixedShutter_;
+ double fixedAnalogueGain_;
+};
+
+} /* namespace RPiController */
diff --git a/src/ipa/rpi/vc4/vc4.cpp b/src/ipa/rpi/vc4/vc4.cpp
index 789a345..1de0d3c 100644
--- a/src/ipa/rpi/vc4/vc4.cpp
+++ b/src/ipa/rpi/vc4/vc4.cpp
@@ -60,7 +60,7 @@
bool validateIspControls();
void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls);
- void applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls);
+ void applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls);
void applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls);
void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls);
void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls);
@@ -142,7 +142,7 @@
if (ccmStatus)
applyCCM(ccmStatus, ctrls);
- AgcStatus *dgStatus = rpiMetadata.getLocked<AgcStatus>("agc.status");
+ AgcPrepareStatus *dgStatus = rpiMetadata.getLocked<AgcPrepareStatus>("agc.prepare_status");
if (dgStatus)
applyDG(dgStatus, ctrls);
@@ -191,7 +191,7 @@
using namespace RPiController;
const bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
- StatisticsPtr statistics = std::make_unique<Statistics>(Statistics::AgcStatsPos::PreWb,
+ StatisticsPtr statistics = std::make_shared<Statistics>(Statistics::AgcStatsPos::PreWb,
Statistics::ColourStatsPos::PostLsc);
const Controller::HardwareConfig &hw = controller_.getHardwareConfig();
unsigned int i;
@@ -284,7 +284,7 @@
static_cast<int32_t>(awbStatus->gainB * 1000));
}
-void IpaVc4::applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls)
+void IpaVc4::applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls)
{
ctrls.set(V4L2_CID_DIGITAL_GAIN,
static_cast<int32_t>(dgStatus->digitalGain * 1000));
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
index 97acafb..51fa1bb 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
@@ -695,7 +695,7 @@
* outside the v4l2 device. Store it in the stream buffer list
* so we can track it.
*/
- stream->setExternalBuffer(buffer);
+ stream->setExportedBuffer(buffer);
}
/*
@@ -1383,11 +1383,6 @@
Request *request = requestQueue_.empty() ? nullptr : requestQueue_.front();
if (!dropFrameCount_ && request && request->findBuffer(stream) == buffer) {
/*
- * Check if this is an externally provided buffer, and if
- * so, we must stop tracking it in the pipeline handler.
- */
- handleExternalBuffer(buffer, stream);
- /*
* Tag the buffer as completed, returning it to the
* application.
*/
@@ -1426,17 +1421,6 @@
}
}
-void CameraData::handleExternalBuffer(FrameBuffer *buffer, RPi::Stream *stream)
-{
- unsigned int id = stream->getBufferId(buffer);
-
- if (!(id & MaskExternalBuffer))
- return;
-
- /* Stop the Stream object from tracking the buffer. */
- stream->removeExternalBuffer(buffer);
-}
-
void CameraData::checkRequestCompleted()
{
bool requestCompleted = false;
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
index a139c98..dbabc61 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
@@ -196,7 +196,6 @@
unsigned int ispOutputTotal_;
private:
- void handleExternalBuffer(FrameBuffer *buffer, Stream *stream);
void checkRequestCompleted();
};
diff --git a/src/libcamera/pipeline/rpi/common/rpi_stream.cpp b/src/libcamera/pipeline/rpi/common/rpi_stream.cpp
index c158843..7319f51 100644
--- a/src/libcamera/pipeline/rpi/common/rpi_stream.cpp
+++ b/src/libcamera/pipeline/rpi/common/rpi_stream.cpp
@@ -8,6 +8,9 @@
#include <libcamera/base/log.h>
+/* Maximum number of buffer slots to allocate in the V4L2 device driver. */
+static constexpr unsigned int maxV4L2BufferCount = 32;
+
namespace libcamera {
LOG_DEFINE_CATEGORY(RPISTREAM)
@@ -53,7 +56,7 @@
void Stream::setExportedBuffers(std::vector<std::unique_ptr<FrameBuffer>> *buffers)
{
for (auto const &buffer : *buffers)
- bufferMap_.emplace(id_.get(), buffer.get());
+ bufferMap_.emplace(++id_, buffer.get());
}
const BufferMap &Stream::getBuffers() const
@@ -76,18 +79,9 @@
return it->first;
}
-void Stream::setExternalBuffer(FrameBuffer *buffer)
+void Stream::setExportedBuffer(FrameBuffer *buffer)
{
- bufferMap_.emplace(BufferMask::MaskExternalBuffer | id_.get(), buffer);
-}
-
-void Stream::removeExternalBuffer(FrameBuffer *buffer)
-{
- unsigned int id = getBufferId(buffer);
-
- /* Ensure we have this buffer in the stream, and it is marked external. */
- ASSERT(id & BufferMask::MaskExternalBuffer);
- bufferMap_.erase(id);
+ bufferMap_.emplace(++id_, buffer);
}
int Stream::prepareBuffers(unsigned int count)
@@ -95,34 +89,17 @@
int ret;
if (!(flags_ & StreamFlag::ImportOnly)) {
- if (count) {
- /* Export some frame buffers for internal use. */
- ret = dev_->exportBuffers(count, &internalBuffers_);
- if (ret < 0)
- return ret;
+ /* Export some frame buffers for internal use. */
+ ret = dev_->exportBuffers(count, &internalBuffers_);
+ if (ret < 0)
+ return ret;
- /* Add these exported buffers to the internal/external buffer list. */
- setExportedBuffers(&internalBuffers_);
- resetBuffers();
- }
-
- /* We must import all internal/external exported buffers. */
- count = bufferMap_.size();
+ /* Add these exported buffers to the internal/external buffer list. */
+ setExportedBuffers(&internalBuffers_);
+ resetBuffers();
}
- /*
- * If this is an external stream, we must allocate slots for buffers that
- * might be externally allocated. We have no indication of how many buffers
- * may be used, so this might overallocate slots in the buffer cache.
- * Similarly, if this stream is only importing buffers, we do the same.
- *
- * \todo Find a better heuristic, or, even better, an exact solution to
- * this issue.
- */
- if ((flags_ & StreamFlag::External) || (flags_ & StreamFlag::ImportOnly))
- count = count * 2;
-
- return dev_->importBuffers(count);
+ return dev_->importBuffers(maxV4L2BufferCount);
}
int Stream::queueBuffer(FrameBuffer *buffer)
@@ -175,9 +152,6 @@
/* Push this buffer back into the queue to be used again. */
availableBuffers_.push(buffer);
- /* Allow the buffer id to be reused. */
- id_.release(getBufferId(buffer));
-
/*
* Do we have any Request buffers that are waiting to be queued?
* If so, do it now as availableBuffers_ will not be empty.
@@ -236,7 +210,7 @@
requestBuffers_ = std::queue<FrameBuffer *>{};
internalBuffers_.clear();
bufferMap_.clear();
- id_.reset();
+ id_ = 0;
}
int Stream::queueToDevice(FrameBuffer *buffer)
diff --git a/src/libcamera/pipeline/rpi/common/rpi_stream.h b/src/libcamera/pipeline/rpi/common/rpi_stream.h
index 6edd304..889b499 100644
--- a/src/libcamera/pipeline/rpi/common/rpi_stream.h
+++ b/src/libcamera/pipeline/rpi/common/rpi_stream.h
@@ -28,7 +28,6 @@
MaskStats = 0x010000,
MaskEmbeddedData = 0x020000,
MaskBayerData = 0x040000,
- MaskExternalBuffer = 0x100000,
};
/*
@@ -55,13 +54,13 @@
using StreamFlags = Flags<StreamFlag>;
Stream()
- : flags_(StreamFlag::None), id_(BufferMask::MaskID)
+ : flags_(StreamFlag::None), id_(0)
{
}
Stream(const char *name, MediaEntity *dev, StreamFlags flags = StreamFlag::None)
: flags_(flags), name_(name),
- dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(BufferMask::MaskID)
+ dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(0)
{
}
@@ -77,8 +76,7 @@
const BufferMap &getBuffers() const;
unsigned int getBufferId(FrameBuffer *buffer) const;
- void setExternalBuffer(FrameBuffer *buffer);
- void removeExternalBuffer(FrameBuffer *buffer);
+ void setExportedBuffer(FrameBuffer *buffer);
int prepareBuffers(unsigned int count);
int queueBuffer(FrameBuffer *buffer);
@@ -88,44 +86,6 @@
void releaseBuffers();
private:
- class IdGenerator
- {
- public:
- IdGenerator(unsigned int max)
- : max_(max), id_(0)
- {
- }
-
- unsigned int get()
- {
- unsigned int id;
- if (!recycle_.empty()) {
- id = recycle_.front();
- recycle_.pop();
- } else {
- id = ++id_;
- ASSERT(id_ <= max_);
- }
- return id;
- }
-
- void release(unsigned int id)
- {
- recycle_.push(id);
- }
-
- void reset()
- {
- id_ = 0;
- recycle_ = {};
- }
-
- private:
- unsigned int max_;
- unsigned int id_;
- std::queue<unsigned int> recycle_;
- };
-
void clearBuffers();
int queueToDevice(FrameBuffer *buffer);
@@ -138,7 +98,7 @@
std::unique_ptr<V4L2VideoDevice> dev_;
/* Tracks a unique id key for the bufferMap_ */
- IdGenerator id_;
+ unsigned int id_;
/* All frame buffers associated with this device stream. */
BufferMap bufferMap_;
diff --git a/src/libcamera/stream.cpp b/src/libcamera/stream.cpp
index 272222b..f3e00ea 100644
--- a/src/libcamera/stream.cpp
+++ b/src/libcamera/stream.cpp
@@ -311,7 +311,8 @@
* The stride value reports the number of bytes between the beginning of
* successive lines in an image buffer for this stream. The value is
* valid after successfully validating the configuration with a call to
- * CameraConfiguration::validate().
+ * CameraConfiguration::validate(). For compressed formats (such as MJPEG),
+ * this value will be zero.
*/
/**