usb: Add scaling and JPEG conversion to image_processor

ImageProcessor::Convert() becomes ConvertFormat() and now takes the request
metadata, a new ImageProcessor::Scale() resizes YU12 frames, and
CachedFrame::Convert() scales to the output frame size before converting
the format. JPEG output also sets EXIF tags from the metadata.
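
The new flow, as a minimal sketch mirroring CachedFrame::Convert()
(|yu12_frame|, |request_metadata| and |out_frame| are illustrative names;
buffer setup and error handling are omitted):

  // Scale the cached YU12 frame to the requested output size...
  AllocatedFrameBuffer scaled(ImageProcessor::GetConvertedSize(
      V4L2_PIX_FMT_YUV420, out_frame->GetWidth(), out_frame->GetHeight()));
  scaled.SetWidth(out_frame->GetWidth());
  scaled.SetHeight(out_frame->GetHeight());
  ImageProcessor::Scale(yu12_frame, &scaled);
  // ...then convert. A V4L2_PIX_FMT_JPEG destination also gets an EXIF
  // APP1 segment built from |request_metadata|.
  ImageProcessor::ConvertFormat(request_metadata, scaled, out_frame);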

BUG=b:24674340
TEST=Image has correct EXIF info.

Change-Id: If71b610475376f348d78d709982f7c19c5ad4df9
Reviewed-on: https://chromium-review.googlesource.com/455605
Commit-Ready: Heng-ruey Hsu <henryhsu@chromium.org>
Tested-by: Heng-ruey Hsu <henryhsu@chromium.org>
Reviewed-by: Ricky Liang <jcliang@chromium.org>
diff --git a/hal/usb/cached_frame.cc b/hal/usb/cached_frame.cc
index b6b79e9..548ce73 100644
--- a/hal/usb/cached_frame.cc
+++ b/hal/usb/cached_frame.cc
@@ -72,11 +72,31 @@
                                           yu12_frame_->GetHeight());
 }
 
-int CachedFrame::Convert(FrameBuffer* out_frame, bool video_hack) {
+int CachedFrame::Convert(const CameraMetadata& metadata,
+                         FrameBuffer* out_frame,
+                         bool video_hack) {
   if (video_hack && out_frame->GetFourcc() == V4L2_PIX_FMT_YVU420) {
     out_frame->SetFourcc(V4L2_PIX_FMT_YUV420);
   }
-  return ImageProcessor::Convert(*yu12_frame_.get(), out_frame);
+
+  FrameBuffer* source_frame = yu12_frame_.get();
+  if (GetWidth() != out_frame->GetWidth() ||
+      GetHeight() != out_frame->GetHeight()) {
+    size_t cache_size = ImageProcessor::GetConvertedSize(
+        yu12_frame_->GetFourcc(), out_frame->GetWidth(),
+        out_frame->GetHeight());
+    if (cache_size == 0) {
+      return -EINVAL;
+    } else if (cache_size > scaled_frame_->GetBufferSize()) {
+      scaled_frame_.reset(new AllocatedFrameBuffer(cache_size));
+    }
+    scaled_frame_->SetWidth(out_frame->GetWidth());
+    scaled_frame_->SetHeight(out_frame->GetHeight());
+    ImageProcessor::Scale(*yu12_frame_.get(), scaled_frame_.get());
+
+    source_frame = scaled_frame_.get();
+  }
+  return ImageProcessor::ConvertFormat(metadata, *source_frame, out_frame);
 }
 
 int CachedFrame::ConvertToYU12() {
@@ -91,7 +111,8 @@
   yu12_frame_->SetWidth(source_frame_->GetWidth());
   yu12_frame_->SetHeight(source_frame_->GetHeight());
 
-  int res = ImageProcessor::Convert(*source_frame_, yu12_frame_.get());
+  int res = ImageProcessor::ConvertFormat(CameraMetadata(), *source_frame_,
+                                          yu12_frame_.get());
   if (res) {
     LOGF(ERROR) << "Convert from " << FormatToString(source_frame_->GetFourcc())
                 << " to YU12 fails.";
diff --git a/hal/usb/cached_frame.h b/hal/usb/cached_frame.h
index 3826a84..c6f484b 100644
--- a/hal/usb/cached_frame.h
+++ b/hal/usb/cached_frame.h
@@ -8,6 +8,7 @@
 
 #include <memory>
 
+#include "arc/camera_metadata.h"
 #include "hal/usb/image_processor.h"
 
 namespace arc {
@@ -45,12 +46,15 @@
   // 0 on error.
   size_t GetConvertedSize(int fourcc) const;
 
-  // Caller should fill |fourcc|, |data|, and |buffer_size| of |out_frame|. The
-  // function will fill other members in |out_frame|.
+  // Caller should fill everything except |data_size| and |fd| of |out_frame|.
+  // The function converts the format and scales the image to fit the size of
+  // |out_frame|.
   // If |video_hack| is true, it outputs YU12 when |hal_pixel_format| is YV12
-  // (swapping U/V planes). Caller should fill |fourcc|, |data|, and
+  // (swapping U/V planes).
   // Return non-zero error code on failure; return 0 on success.
-  int Convert(FrameBuffer* out_frame, bool video_hack = false);
+  int Convert(const CameraMetadata& metadata,
+              FrameBuffer* out_frame,
+              bool video_hack = false);
 
  private:
   int ConvertToYU12();
@@ -71,6 +75,9 @@
 
   // Cache YU12 decoded results.
   std::unique_ptr<AllocatedFrameBuffer> yu12_frame_;
+
+  // Temporary buffer for scaled results.
+  std::unique_ptr<AllocatedFrameBuffer> scaled_frame_;
 };
 
 }  // namespace arc
diff --git a/hal/usb/camera_client.cc b/hal/usb/camera_client.cc
index a3d3dff..bdafac9 100644
--- a/hal/usb/camera_client.cc
+++ b/hal/usb/camera_client.cc
@@ -395,6 +395,7 @@
                          << ", Number of output buffers: "
                          << capture_result.num_output_buffers;
 
+  CameraMetadata* metadata = request->GetMetadata();
   // Handle each stream output buffer and convert it to corresponding format.
   for (size_t i = 0; i < capture_result.num_output_buffers; i++) {
     camera3_stream_buffer_t* buffer =
@@ -414,10 +415,9 @@
       return;
     }
 
-    input_frame_.Convert(&output_frame);
+    input_frame_.Convert(*metadata, &output_frame);
   }
 
-  CameraMetadata* metadata = request->GetMetadata();
   int64_t timestamp;
   NotifyShutter(capture_result.frame_number, &timestamp);
   metadata->Update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
diff --git a/hal/usb/image_processor.cc b/hal/usb/image_processor.cc
index fb13d8d..dcc0ab7 100644
--- a/hal/usb/image_processor.cc
+++ b/hal/usb/image_processor.cc
@@ -7,8 +7,11 @@
 
 #include <errno.h>
 #include <libyuv.h>
+#include <time.h>
 
 #include "arc/common.h"
+#include "arc/exif_utils.h"
+#include "arc/jpeg_compressor.h"
 #include "hal/usb/common_types.h"
 
 namespace arc {
@@ -54,6 +57,13 @@
                       int dst_stride_y,
                       int dst_stride_uv);
 static int YU12ToNV21(const void* yv12, void* nv21, int width, int height);
+static bool ConvertToJpeg(const CameraMetadata& metadata,
+                          const FrameBuffer& in_frame,
+                          FrameBuffer* out_frame);
+static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils);
+
+// Precision of the float-to-rational conversion for EXIF tags.
+static const int kRationalPrecision = 10000;
 
 inline static size_t Align16(size_t value) {
   return (value + 15) & ~15;
@@ -78,14 +88,15 @@
     case V4L2_PIX_FMT_RGB32:
       return width * height * 4;
     default:
-      LOGF(ERROR) << "Pixel format 0x" << std::hex << fourcc
+      LOGF(ERROR) << "Pixel format " << FormatToString(fourcc)
                   << " is unsupported.";
       return 0;
   }
 }
 
-int ImageProcessor::Convert(const FrameBuffer& in_frame,
-                            FrameBuffer* out_frame) {
+int ImageProcessor::ConvertFormat(const CameraMetadata& metadata,
+                                  const FrameBuffer& in_frame,
+                                  FrameBuffer* out_frame) {
   if ((in_frame.GetWidth() % 2) || (in_frame.GetHeight() % 2)) {
     LOGF(ERROR) << "Width or height is not even (" << in_frame.GetWidth()
                 << " x " << in_frame.GetHeight() << ")";
@@ -165,6 +176,11 @@
         LOGF_IF(ERROR, res) << "I420ToABGR() returns " << res;
         return res ? -EINVAL : 0;
       }
+      case V4L2_PIX_FMT_JPEG: {
+        bool res = ConvertToJpeg(metadata, in_frame, out_frame);
+        LOGF_IF(ERROR, !res) << "ConvertToJpeg() failed.";
+        return res ? 0 : -EINVAL;
+      }
       default:
         LOGF(ERROR) << "Destination pixel format "
                     << FormatToString(out_frame->GetFourcc())
@@ -202,6 +218,43 @@
   }
 }
 
+int ImageProcessor::Scale(const FrameBuffer& in_frame, FrameBuffer* out_frame) {
+  if (in_frame.GetFourcc() != V4L2_PIX_FMT_YUV420) {
+    LOGF(ERROR) << "Pixel format " << FormatToString(in_frame.GetFourcc())
+                << " is unsupported.";
+    return -EINVAL;
+  }
+
+  size_t data_size = GetConvertedSize(
+      in_frame.GetFourcc(), out_frame->GetWidth(), out_frame->GetHeight());
+
+  if (out_frame->SetDataSize(data_size)) {
+    LOGF(ERROR) << "Set data size failed";
+    return -EINVAL;
+  }
+  out_frame->SetFourcc(in_frame.GetFourcc());
+
+  VLOGF(1) << "Scale image from " << in_frame.GetWidth() << "x"
+           << in_frame.GetHeight() << " to " << out_frame->GetWidth() << "x"
+           << out_frame->GetHeight();
+
+  int ret = libyuv::I420Scale(
+      in_frame.GetData(), in_frame.GetWidth(),
+      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight(),
+      in_frame.GetWidth() / 2,
+      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4,
+      in_frame.GetWidth() / 2, in_frame.GetWidth(), in_frame.GetHeight(),
+      out_frame->GetData(), out_frame->GetWidth(),
+      out_frame->GetData() + out_frame->GetWidth() * out_frame->GetHeight(),
+      out_frame->GetWidth() / 2,
+      out_frame->GetData() +
+          out_frame->GetWidth() * out_frame->GetHeight() * 5 / 4,
+      out_frame->GetWidth() / 2, out_frame->GetWidth(), out_frame->GetHeight(),
+      libyuv::FilterMode::kFilterNone);
+  LOGF_IF(ERROR, ret) << "I420Scale failed: " << ret;
+  return ret;
+}
+
 static int YU12ToYV12(const void* yu12,
                       void* yv12,
                       int width,
@@ -256,4 +309,144 @@
   return 0;
 }
 
+static bool ConvertToJpeg(const CameraMetadata& metadata,
+                          const FrameBuffer& in_frame,
+                          FrameBuffer* out_frame) {
+  ExifUtils utils;
+  int jpeg_quality, thumbnail_jpeg_quality;
+  camera_metadata_ro_entry entry = metadata.Find(ANDROID_JPEG_QUALITY);
+  if (entry.count) {
+    jpeg_quality = entry.data.u8[0];
+  } else {
+    LOGF(ERROR) << "Cannot find jpeg quality in metadata.";
+    return false;
+  }
+  if (metadata.Exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
+    entry = metadata.Find(ANDROID_JPEG_THUMBNAIL_QUALITY);
+    thumbnail_jpeg_quality = entry.data.u8[0];
+  } else {
+    thumbnail_jpeg_quality = jpeg_quality;
+  }
+
+  if (!utils.Initialize(in_frame.GetData(), in_frame.GetWidth(),
+                        in_frame.GetHeight(), thumbnail_jpeg_quality)) {
+    LOGF(ERROR) << "ExifUtils initialization failed.";
+    return false;
+  }
+  if (!SetExifTags(metadata, &utils)) {
+    LOGF(ERROR) << "Setting Exif tags failed.";
+    return false;
+  }
+  if (!utils.GenerateApp1()) {
+    LOGF(ERROR) << "Generating APP1 segment failed.";
+    return false;
+  }
+  JpegCompressor compressor;
+  if (!compressor.CompressImage(in_frame.GetData(), in_frame.GetWidth(),
+                                in_frame.GetHeight(), jpeg_quality,
+                                utils.GetApp1Buffer(), utils.GetApp1Length())) {
+    LOGF(ERROR) << "JPEG image compression failed";
+    return false;
+  }
+  size_t buffer_length = compressor.GetCompressedImageSize();
+  memcpy(out_frame->GetData(), compressor.GetCompressedImagePtr(),
+         buffer_length);
+  return true;
+}
+
+static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils) {
+  time_t raw_time = 0;
+  struct tm time_info;
+  bool time_available = time(&raw_time) != -1;
+  localtime_r(&raw_time, &time_info);
+  if (!utils->SetDateTime(time_info)) {
+    LOGF(ERROR) << "Setting data time failed.";
+    return false;
+  }
+
+  float focal_length;
+  camera_metadata_ro_entry entry = metadata.Find(ANDROID_LENS_FOCAL_LENGTH);
+  if (entry.count) {
+    focal_length = entry.data.f[0];
+  } else {
+    LOGF(ERROR) << "Cannot find focal length in metadata.";
+    return false;
+  }
+  if (!utils->SetFocalLength(
+          static_cast<uint32_t>(focal_length * kRationalPrecision),
+          kRationalPrecision)) {
+    LOGF(ERROR) << "Setting focal length failed.";
+    return false;
+  }
+
+  if (metadata.Exists(ANDROID_JPEG_GPS_COORDINATES)) {
+    entry = metadata.Find(ANDROID_JPEG_GPS_COORDINATES);
+    if (entry.count < 3) {
+      LOGF(ERROR) << "Gps coordinates in metadata is not complete.";
+      return false;
+    }
+    if (!utils->SetGpsLatitude(entry.data.d[0])) {
+      LOGF(ERROR) << "Setting gps latitude failed.";
+      return false;
+    }
+    if (!utils->SetGpsLongitude(entry.data.d[1])) {
+      LOGF(ERROR) << "Setting gps longitude failed.";
+      return false;
+    }
+    if (!utils->SetGpsAltitude(entry.data.d[2])) {
+      LOGF(ERROR) << "Setting gps altitude failed.";
+      return false;
+    }
+  }
+
+  if (metadata.Exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
+    entry = metadata.Find(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+    std::string method_str(reinterpret_cast<const char*>(entry.data.u8));
+    if (!utils->SetGpsProcessingMethod(method_str)) {
+      LOGF(ERROR) << "Setting gps processing method failed.";
+      return false;
+    }
+  }
+
+  if (time_available && metadata.Exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
+    entry = metadata.Find(ANDROID_JPEG_GPS_TIMESTAMP);
+    time_t timestamp = static_cast<time_t>(entry.data.i64[0]);
+    if (gmtime_r(&timestamp, &time_info)) {
+      if (!utils->SetGpsTimestamp(time_info)) {
+        LOGF(ERROR) << "Setting gps timestamp failed.";
+        return false;
+      }
+    } else {
+      LOGF(ERROR) << "Time tranformation failed.";
+      return false;
+    }
+  }
+
+  if (metadata.Exists(ANDROID_JPEG_ORIENTATION)) {
+    entry = metadata.Find(ANDROID_JPEG_ORIENTATION);
+    if (!utils->SetOrientation(entry.data.i32[0])) {
+      LOGF(ERROR) << "Setting orientation failed.";
+      return false;
+    }
+  }
+
+  if (metadata.Exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+    entry = metadata.Find(ANDROID_JPEG_THUMBNAIL_SIZE);
+    if (entry.count < 2) {
+      LOGF(ERROR) << "Thumbnail size in metadata is not complete.";
+      return false;
+    }
+    int thumbnail_width = entry.data.i32[0];
+    int thumbnail_height = entry.data.i32[1];
+    if (thumbnail_width > 0 && thumbnail_height > 0) {
+      if (!utils->SetThumbnailSize(static_cast<uint16_t>(thumbnail_width),
+                                   static_cast<uint16_t>(thumbnail_height))) {
+        LOGF(ERROR) << "Setting thumbnail size failed.";
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
 }  // namespace arc
diff --git a/hal/usb/image_processor.h b/hal/usb/image_processor.h
index 29498fc..7d05a69 100644
--- a/hal/usb/image_processor.h
+++ b/hal/usb/image_processor.h
@@ -13,6 +13,7 @@
 // Declarations of HAL_PIXEL_FORMAT_XXX.
 #include <system/graphics.h>
 
+#include "arc/camera_metadata.h"
 #include "hal/usb/frame_buffer.h"
 
 namespace arc {
@@ -29,7 +30,15 @@
   // fill |data|, |buffer_size|, |width|, and |height| of |out_frame|. The
   // function will fill |out_frame->data_size|. Return non-zero error code on
   // failure; return 0 on success.
-  static int Convert(const FrameBuffer& in_frame, FrameBuffer* out_frame);
+  static int ConvertFormat(const CameraMetadata& metadata,
+                           const FrameBuffer& in_frame,
+                           FrameBuffer* out_frame);
+
+  // Scale the image from the |in_frame| size to the |out_frame| size. Only
+  // the V4L2_PIX_FMT_YUV420 format is supported. Caller should fill |data|,
+  // |width|, |height|, and |buffer_size| of |out_frame|. The function will
+  // fill |data_size| and |fourcc| of |out_frame|.
+  static int Scale(const FrameBuffer& in_frame, FrameBuffer* out_frame);
 };
 
 }  // namespace arc
diff --git a/hal/usb/stream_format.cc b/hal/usb/stream_format.cc
index 253195c..1277e76 100644
--- a/hal/usb/stream_format.cc
+++ b/hal/usb/stream_format.cc
@@ -6,9 +6,9 @@
 #include "hal/usb/stream_format.h"
 
 #include <linux/videodev2.h>
+#include <system/graphics.h>
 
 #include "arc/common.h"
-#include "hal/usb/metadata_handler.h"
 
 namespace arc {
 
@@ -80,7 +80,7 @@
     case HAL_PIXEL_FORMAT_RGBA_8888:
       return V4L2_PIX_FMT_RGB32;
     case HAL_PIXEL_FORMAT_BLOB:
-      return V4L2_PIX_FMT_MJPEG;
+      return V4L2_PIX_FMT_JPEG;
     case HAL_PIXEL_FORMAT_YCbCr_420_888:
       // This is a flexible YUV format that depends on platform. Different
       // platform may have different format. It can be YVU420 or NV12. Now we