Snap for 8270922 from 7c4bb1d65158c777905fbf4e7bc4172cf2f8589d to t-keystone-qcom-release

Change-Id: I1ee5b99a88788fda0f07bb8e883bc2e437b50e7c
diff --git a/Android.bp b/Android.bp
index ebdd254..acaeb25 100644
--- a/Android.bp
+++ b/Android.bp
@@ -58,6 +58,10 @@
             ],
         },
     },
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.bluetooth",
+    ],
     min_sdk_version: "29",
     host_supported: true,
 
diff --git a/include/hardware/audio.h b/include/hardware/audio.h
index adec3da..daaa16f 100644
--- a/include/hardware/audio.h
+++ b/include/hardware/audio.h
@@ -247,6 +247,10 @@
     struct record_track_metadata_v7* tracks;
 } sink_metadata_v7_t;
 
+/** output stream callback method to indicate changes in supported latency modes */
+typedef void (*stream_latency_mode_callback_t)(
+        audio_latency_mode_t *modes, size_t num_modes, void *cookie);
+
 /**
  * audio_stream_out is the abstraction interface for the audio output hardware.
  *
@@ -533,7 +537,68 @@
      */
     int (*set_playback_rate_parameters)(struct audio_stream_out *stream,
                                         const audio_playback_rate_t *playbackRate);
+
+    /**
+     * Indicates the requested latency mode for this output stream.
+     *
+     * The requested mode can be one of the modes returned by
+     * get_recommended_latency_modes().
+     *
+     * Support for this method is optional but mandated on specific spatial audio
+     * streams indicated by the AUDIO_OUTPUT_FLAG_SPATIALIZER flag if they can be routed
+     * to a BT classic sink.
+     *
+     * \param[in] stream the stream object.
+     * \param[in] mode the requested latency mode.
+     * \return 0 in case of success.
+     *         -EINVAL if the arguments are invalid
+     *         -ENOSYS if the function is not available
+     */
+    int (*set_latency_mode)(struct audio_stream_out *stream, audio_latency_mode_t mode);
+
+    /**
+     * Indicates which latency modes are currently supported on this output stream.
+     * If the transport protocol (e.g. Bluetooth A2DP) used by this output stream to reach
+     * the output device supports variable latency modes, the HAL indicates which
+     * modes are currently supported.
+     * The framework can then call set_latency_mode() with one of the supported modes to
+     * select the desired operating mode.
+     *
+     * Support for this method is optional but mandated on specific spatial audio
+     * streams indicated by the AUDIO_OUTPUT_FLAG_SPATIALIZER flag if they can be routed
+     * to a BT classic sink.
+     *
+     * \param[in] stream the stream object.
+     * \param[out] modes the supported latency modes.
+     * \param[in,out] num_modes as input, the maximum number of modes to return;
+     *                as output, the actual number of modes returned.
+     * \return 0 in case of success.
+     *         -EINVAL if the arguments are invalid
+     *         -ENOSYS if the function is not available
+     */
+    int (*get_recommended_latency_modes)(struct audio_stream_out *stream,
+            audio_latency_mode_t *modes, size_t *num_modes);
+
+    /**
+     * Set the callback interface for notifying changes in supported latency modes.
+     *
+     * Calling this method with a null pointer will result in clearing a previously set callback.
+     *
+     * Support for this method is optional but mandated on specific spatial audio
+     * streams indicated by the AUDIO_OUTPUT_FLAG_SPATIALIZER flag if they can be routed
+     * to a BT classic sink.
+     *
+     * \param[in] stream the stream object.
+     * \param[in] callback the registered callback or null to unregister.
+     * \param[in] cookie the context to pass when calling the callback.
+     * \return 0 in case of success.
+     *         -EINVAL if the arguments are invalid
+     *         -ENOSYS if the function is not available
+     */
+    int (*set_latency_mode_callback)(struct audio_stream_out *stream,
+            stream_latency_mode_callback_t callback, void *cookie);
 };
+
 typedef struct audio_stream_out audio_stream_out_t;
 
 struct audio_stream_in {
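For context, here is a minimal sketch (not part of this change) of how a HAL might back the three new latency-mode entry points. The out_stream_t wrapper, its lock, and the mode table are hypothetical; a real implementation would also invoke the cached callback from its transport code whenever the recommended modes change.

/* Sketch only: out_stream_t and its fields are hypothetical. */
#include <errno.h>
#include <pthread.h>
#include <string.h>
#include <hardware/audio.h>

typedef struct {
    struct audio_stream_out stream;       /* must stay the first member */
    pthread_mutex_t lock;
    audio_latency_mode_t modes[4];        /* currently recommended modes */
    size_t num_modes;
    audio_latency_mode_t active_mode;
    stream_latency_mode_callback_t cb;
    void *cb_cookie;
} out_stream_t;

static int out_set_latency_mode(struct audio_stream_out *stream, audio_latency_mode_t mode) {
    out_stream_t *out = (out_stream_t *)stream;
    pthread_mutex_lock(&out->lock);
    out->active_mode = mode;              /* apply to the transport (e.g. the A2DP encoder) here */
    pthread_mutex_unlock(&out->lock);
    return 0;
}

static int out_get_recommended_latency_modes(struct audio_stream_out *stream,
        audio_latency_mode_t *modes, size_t *num_modes) {
    out_stream_t *out = (out_stream_t *)stream;
    if (modes == NULL || num_modes == NULL) return -EINVAL;
    pthread_mutex_lock(&out->lock);
    const size_t n = out->num_modes < *num_modes ? out->num_modes : *num_modes;
    memcpy(modes, out->modes, n * sizeof(modes[0]));
    *num_modes = n;
    pthread_mutex_unlock(&out->lock);
    return 0;
}

static int out_set_latency_mode_callback(struct audio_stream_out *stream,
        stream_latency_mode_callback_t callback, void *cookie) {
    out_stream_t *out = (out_stream_t *)stream;
    pthread_mutex_lock(&out->lock);
    out->cb = callback;                   /* NULL clears a previously registered callback */
    out->cb_cookie = cookie;
    pthread_mutex_unlock(&out->lock);
    return 0;
}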
@@ -982,6 +1047,24 @@
      */
     int (*get_audio_port_v7)(struct audio_hw_device *dev,
                              struct audio_port_v7 *port);
+
+    /**
+     * Called when the state of the connection of an external device has been changed.
+     * The "port" parameter is only used as input; besides identifying the device
+     * port, it may also contain additional information such as extra audio descriptors.
+     *
+     * HAL version 3.2 and higher only. If the HAL does not implement this method,
+     * it must leave the function entry as null, or return -ENOSYS. In this case
+     * the framework will use 'set_parameters', which can only pass the device address.
+     *
+     * @param dev the audio HAL device context.
+     * @param port device port identification and extra information.
+     * @param connected whether the external device is connected.
+     * @return operation completion status.
+     */
+    int (*set_device_connected_state_v7)(struct audio_hw_device *dev,
+                                         struct audio_port_v7 *port,
+                                         bool connected);
 };
 typedef struct audio_hw_device audio_hw_device_t;
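A corresponding HAL-side sketch for the new connection entry point is shown below; the adev_ prefix and the my_handle_device_connection() helper are hypothetical. A HAL that does not support the method can simply leave the function pointer null, in which case the framework falls back to set_parameters as documented above.

/* Sketch only: function and helper names are hypothetical. */
static int adev_set_device_connected_state_v7(struct audio_hw_device *dev,
                                              struct audio_port_v7 *port,
                                              bool connected) {
    if (dev == NULL || port == NULL || port->type != AUDIO_PORT_TYPE_DEVICE) {
        return -EINVAL;
    }
    /* port->ext.device identifies the device (type and address); the port may also
     * carry extra audio descriptors reported by the device itself. */
    return my_handle_device_connection(dev, &port->ext.device, connected);
}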
 
diff --git a/include/hardware/sensors-base.h b/include/hardware/sensors-base.h
index b88a8c2..dbf99f5 100644
--- a/include/hardware/sensors-base.h
+++ b/include/hardware/sensors-base.h
@@ -53,6 +53,11 @@
     SENSOR_TYPE_ACCELEROMETER_UNCALIBRATED = 35,
     SENSOR_TYPE_HINGE_ANGLE = 36,
     SENSOR_TYPE_HEAD_TRACKER = 37,
+    SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES = 38,
+    SENSOR_TYPE_GYROSCOPE_LIMITED_AXES = 39,
+    SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED = 40,
+    SENSOR_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED = 41,
+    SENSOR_TYPE_HEADING = 42,
     SENSOR_TYPE_DEVICE_PRIVATE_BASE = 65536 /* 0x10000 */,
 };
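The matching string types are added to sensors.h below; for illustration, a possible mapping between the two (the helper name is hypothetical, and the SENSOR_STRING_TYPE_* macros are the ones introduced later in this change):

/* Sketch only: helper name is hypothetical. */
static const char *new_sensor_type_to_string(int32_t type) {
    switch (type) {
        case SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES:
            return SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES;
        case SENSOR_TYPE_GYROSCOPE_LIMITED_AXES:
            return SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES;
        case SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED:
            return SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED;
        case SENSOR_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED:
            return SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED;
        case SENSOR_TYPE_HEADING:
            return SENSOR_STRING_TYPE_HEADING;
        default:
            return NULL;  /* not one of the types added in this change */
    }
}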
 
diff --git a/include/hardware/sensors.h b/include/hardware/sensors.h
index 5686516..6f4baf8 100644
--- a/include/hardware/sensors.h
+++ b/include/hardware/sensors.h
@@ -187,6 +187,11 @@
 #define SENSOR_STRING_TYPE_ACCELEROMETER_UNCALIBRATED   "android.sensor.accelerometer_uncalibrated"
 #define SENSOR_STRING_TYPE_HINGE_ANGLE                  "android.sensor.hinge_angle"
 #define SENSOR_STRING_TYPE_HEAD_TRACKER                 "android.sensor.head_tracker"
+#define SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES   "android.sensor.accelerometer_limited_axes"
+#define SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES       "android.sensor.gyroscope_limited_axes"
+#define SENSOR_STRING_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED "android.sensor.accelerometer_limited_axes_uncalibrated"
+#define SENSOR_STRING_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED "android.sensor.gyroscope_limited_axes_uncalibrated"
+#define SENSOR_STRING_TYPE_HEADING                      "android.sensor.heading"
 
 /**
  * Values returned by the accelerometer in various locations in the universe.
@@ -303,6 +308,66 @@
 } head_tracker_event_t;
 
 /**
+ * Limited axes IMU event data
+ */
+typedef struct {
+    union {
+        float calib[3];
+        struct {
+            float x;
+            float y;
+            float z;
+        };
+    };
+    union {
+        float supported[3];
+        struct {
+            float x_supported;
+            float y_supported;
+            float z_supported;
+        };
+    };
+} limited_axes_imu_event_t;
+
+/**
+ * Limited axes uncalibrated IMU event data
+ */
+typedef struct {
+    union {
+        float uncalib[3];
+        struct {
+            float x_uncalib;
+            float y_uncalib;
+            float z_uncalib;
+        };
+    };
+    union {
+        float bias[3];
+        struct {
+            float x_bias;
+            float y_bias;
+            float z_bias;
+        };
+    };
+    union {
+        float supported[3];
+        struct {
+            float x_supported;
+            float y_supported;
+            float z_supported;
+        };
+    };
+} limited_axes_imu_uncalibrated_event_t;
+
+/**
+ * Heading event data
+ */
+typedef struct {
+    float heading;
+    float accuracy;
+} heading_event_t;
+
+/**
  * Union of the various types of sensor data
  * that can be returned.
  */
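To illustrate the layout, a sketch of filling a limited axes event for a hypothetical two-axis accelerometer follows; it assumes the *_supported fields use 1 for an axis the device reports and 0 otherwise, which this header does not spell out.

/* Sketch only: ax/ay are hypothetical calibrated readings in m/s^2. */
limited_axes_imu_event_t ev;
memset(&ev, 0, sizeof(ev));
ev.x = ax;
ev.y = ay;
ev.z = 0.0f;            /* axis not present on this device */
ev.x_supported = 1.0f;
ev.y_supported = 1.0f;
ev.z_supported = 0.0f;

heading_event_t hd;
hd.heading  = 123.5f;   /* degrees */
hd.accuracy = 5.0f;     /* estimated error, also in degrees */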
@@ -382,6 +447,23 @@
 
             /* vector describing head orientation (added for legacy code support only) */
             head_tracker_event_t head_tracker;
+
+            /*
+             * Limited axes IMU event. See
+             * SENSOR_TYPE_GYROSCOPE_LIMITED_AXES and
+             * SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES for details.
+             */
+            limited_axes_imu_event_t limited_axes_imu;
+
+            /*
+             * Limited axes IMU uncalibrated event. See
+             * SENSOR_TYPE_GYROSCOPE_LIMITED_AXES_UNCALIBRATED and
+             * SENSOR_TYPE_ACCELEROMETER_LIMITED_AXES_UNCALIBRATED for details.
+             */
+            limited_axes_imu_uncalibrated_event_t limited_axes_imu_uncalibrated;
+
+            /* heading data containing value in degrees and its accuracy */
+            heading_event_t heading;
         };
 
         union {
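Below the union additions, a short sketch of how a heading sample could be delivered through sensors_event_t using the new member; the sensor handle and timestamp source are hypothetical.

/* Sketch only: heading_handle and now_ns() are hypothetical. */
sensors_event_t event;
memset(&event, 0, sizeof(event));
event.version          = sizeof(sensors_event_t);
event.sensor           = heading_handle;
event.type             = SENSOR_TYPE_HEADING;
event.timestamp        = now_ns();
event.heading.heading  = 271.25f;   /* degrees */
event.heading.accuracy = 10.0f;     /* estimated error in degrees */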
diff --git a/modules/audio_remote_submix/audio_hw.cpp b/modules/audio_remote_submix/audio_hw.cpp
index 42d3b98..f96854b 100644
--- a/modules/audio_remote_submix/audio_hw.cpp
+++ b/modules/audio_remote_submix/audio_hw.cpp
@@ -63,7 +63,7 @@
 #endif // SUBMIX_VERBOSE_LOGGING
 
 // NOTE: This value will be rounded up to the nearest power of 2 by MonoPipe().
-#define DEFAULT_PIPE_SIZE_IN_FRAMES  (1024*4)
+#define DEFAULT_PIPE_SIZE_IN_FRAMES  (1024*4) // size at default sample rate
 // Value used to divide the MonoPipe() buffer into segments that are written to the source and
 // read from the sink.  The maximum latency of the device is the size of the MonoPipe's buffer
 // the minimum latency is the MonoPipe buffer size divided by this value.
@@ -208,6 +208,11 @@
     return return_value;
 }
 
+static size_t pipe_size_in_frames(const uint32_t sample_rate)
+{
+    return DEFAULT_PIPE_SIZE_IN_FRAMES * ((float) sample_rate / DEFAULT_SAMPLE_RATE_HZ);
+}
+
 // Determine whether the specified sample rate is supported, if it is return the specified sample
 // rate, otherwise return the default sample rate for the submix module.
 static uint32_t get_supported_sample_rate(uint32_t sample_rate)
@@ -1289,8 +1294,10 @@
     // Store a pointer to the device from the output stream.
     out->dev = rsxadev;
     // Initialize the pipe.
-    ALOGV("adev_open_output_stream(): about to create pipe at index %d", route_idx);
-    submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
+    const size_t pipeSizeInFrames = pipe_size_in_frames(config->sample_rate);
+    ALOGI("adev_open_output_stream(): about to create pipe at index %d, rate %u, pipe size %zu",
+          route_idx, config->sample_rate, pipeSizeInFrames);
+    submix_audio_device_create_pipe_l(rsxadev, config, pipeSizeInFrames,
             DEFAULT_PIPE_PERIOD_COUNT, NULL, out, address, route_idx);
 #if LOG_STREAMS_TO_FILES
     out->log_fd = open(LOG_STREAM_OUT_FILENAME, O_CREAT | O_TRUNC | O_WRONLY,
@@ -1419,7 +1426,8 @@
         const size_t frame_size_in_bytes = audio_channel_count_from_in_mask(config->channel_mask) *
                 audio_bytes_per_sample(config->format);
         if (max_buffer_period_size_frames == 0) {
-            max_buffer_period_size_frames = DEFAULT_PIPE_SIZE_IN_FRAMES;
+            max_buffer_period_size_frames =
+                    pipe_size_in_frames(get_supported_sample_rate(config->sample_rate));
         }
         const size_t buffer_size = max_buffer_period_size_frames * frame_size_in_bytes;
         SUBMIX_ALOGV("adev_get_input_buffer_size() returns %zu bytes, %zu frames",
@@ -1532,8 +1540,10 @@
 
     in->read_error_count = 0;
     // Initialize the pipe.
-    ALOGV("adev_open_input_stream(): about to create pipe");
-    submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
+    const size_t pipeSizeInFrames = pipe_size_in_frames(config->sample_rate);
+    ALOGI("adev_open_input_stream(): about to create pipe at index %d, rate %u, pipe size %zu",
+          route_idx, config->sample_rate, pipeSizeInFrames);
+    submix_audio_device_create_pipe_l(rsxadev, config, pipeSizeInFrames,
                                     DEFAULT_PIPE_PERIOD_COUNT, in, NULL, address, route_idx);
 
     sp <MonoPipe> sink = rsxadev->routes[route_idx].rsxSink;
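The new pipe_size_in_frames() helper scales the pipe with the stream's sample rate so the buffered duration stays roughly constant instead of shrinking at higher rates. A standalone check of the arithmetic, assuming the module's default rate is 48 kHz (DEFAULT_SAMPLE_RATE_HZ; an assumption, as that macro is not shown in this hunk):

/* Sketch only: reproduces the scaling with an assumed 48 kHz default rate. */
#include <stdio.h>

#define DEFAULT_PIPE_SIZE_IN_FRAMES (1024 * 4)
#define ASSUMED_DEFAULT_SAMPLE_RATE_HZ 48000

static size_t pipe_size_in_frames(unsigned sample_rate) {
    return DEFAULT_PIPE_SIZE_IN_FRAMES *
            ((float) sample_rate / ASSUMED_DEFAULT_SAMPLE_RATE_HZ);
}

int main(void) {
    printf("%zu\n", pipe_size_in_frames(48000));  /* 4096 frames, same as before */
    printf("%zu\n", pipe_size_in_frames(96000));  /* 8192 frames, same duration at 96 kHz */
    printf("%zu\n", pipe_size_in_frames(8000));   /* 682 frames; MonoPipe rounds up to 1024 */
    return 0;
}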
diff --git a/modules/sensors/dynamic_sensor/HidRawSensor.cpp b/modules/sensors/dynamic_sensor/HidRawSensor.cpp
index 6654228..4520dda 100644
--- a/modules/sensors/dynamic_sensor/HidRawSensor.cpp
+++ b/modules/sensors/dynamic_sensor/HidRawSensor.cpp
@@ -635,8 +635,8 @@
         return false;
     }
 
-    mFeatureInfo.type = SENSOR_TYPE_DEVICE_PRIVATE_BASE;
-    mFeatureInfo.typeString = CUSTOM_TYPE_PREFIX + "headtracker";
+    mFeatureInfo.type = SENSOR_TYPE_HEAD_TRACKER;
+    mFeatureInfo.typeString = SENSOR_STRING_TYPE_HEAD_TRACKER;
     mFeatureInfo.reportModeFlag = SENSOR_FLAG_CONTINUOUS_MODE;
     mFeatureInfo.permission = "";
     mFeatureInfo.isWakeUp = false;
@@ -1011,6 +1011,50 @@
         .type = mSensor.type
     };
     bool valid = true;
+
+    switch (mFeatureInfo.type) {
+        case SENSOR_TYPE_HEAD_TRACKER:
+            valid = getHeadTrackerEventData(message, &event);
+            break;
+        default:
+            valid = getSensorEventData(message, &event);
+            break;
+    }
+    if (!valid) {
+        LOG_E << "Invalid data observed in decoding, discard" << LOG_ENDL;
+        return;
+    }
+    event.timestamp = -1;
+    generateEvent(event);
+}
+
+bool HidRawSensor::getHeadTrackerEventData(const std::vector<uint8_t> &message,
+                                           sensors_event_t *event) {
+    head_tracker_event_t *head_tracker;
+
+    head_tracker = &(event->head_tracker);
+    if (!getReportFieldValue(message, &(mTranslateTable[0]),
+                             &(head_tracker->rx))
+            || !getReportFieldValue(message, &(mTranslateTable[1]),
+                                    &(head_tracker->ry))
+            || !getReportFieldValue(message, &(mTranslateTable[2]),
+                                    &(head_tracker->rz))
+            || !getReportFieldValue(message, &(mTranslateTable[3]),
+                                    &(head_tracker->vx))
+            || !getReportFieldValue(message, &(mTranslateTable[4]),
+                                    &(head_tracker->vy))
+            || !getReportFieldValue(message, &(mTranslateTable[5]),
+                                    &(head_tracker->vz))
+            || !getReportFieldValue(message, &(mTranslateTable[6]),
+                                    &(head_tracker->discontinuity_count))) {
+        return false;
+    }
+
+    return true;
+}
+
+bool HidRawSensor::getSensorEventData(const std::vector<uint8_t> &message,
+                                      sensors_event_t *event) {
     for (const auto &rec : mTranslateTable) {
         int64_t v = (message[rec.byteOffset + rec.byteSize - 1] & 0x80) ? -1 : 0;
         for (int i = static_cast<int>(rec.byteSize) - 1; i >= 0; --i) {
@@ -1020,26 +1064,23 @@
         switch (rec.type) {
             case TYPE_FLOAT:
                 if (v > rec.maxValue || v < rec.minValue) {
-                    valid = false;
+                    return false;
                 }
-                event.data[rec.index] = rec.a * (v + rec.b);
+                event->data[rec.index] = rec.a * (v + rec.b);
                 break;
             case TYPE_INT64:
                 if (v > rec.maxValue || v < rec.minValue) {
-                    valid = false;
+                    return false;
                 }
-                event.u64.data[rec.index] = v + rec.b;
+                event->u64.data[rec.index] = v + rec.b;
                 break;
             case TYPE_ACCURACY:
-                event.magnetic.status = (v & 0xFF) + rec.b;
+                event->magnetic.status = (v & 0xFF) + rec.b;
                 break;
         }
     }
-    if (!valid) {
-        LOG_V << "Range error observed in decoding, discard" << LOG_ENDL;
-    }
-    event.timestamp = -1;
-    generateEvent(event);
+
+    return true;
 }
 
 std::string HidRawSensor::dump() const {
diff --git a/modules/sensors/dynamic_sensor/HidRawSensor.h b/modules/sensors/dynamic_sensor/HidRawSensor.h
index f6d13b5..66843fc 100644
--- a/modules/sensors/dynamic_sensor/HidRawSensor.h
+++ b/modules/sensors/dynamic_sensor/HidRawSensor.h
@@ -46,6 +46,14 @@
     // handle input report received
     void handleInput(uint8_t id, const std::vector<uint8_t> &message);
 
+    // get head tracker sensor event data
+    bool getHeadTrackerEventData(const std::vector<uint8_t> &message,
+                                 sensors_event_t *event);
+
+    // get generic sensor event data
+    bool getSensorEventData(const std::vector<uint8_t> &message,
+                            sensors_event_t *event);
+
     // indicate if the HidRawSensor is a valid one
     bool isValid() const { return mValid; };
 
@@ -141,6 +149,33 @@
     // process HID sensor spec defined orientation (quaternion) sensor usages.
     bool processQuaternionUsage(const std::vector<HidParser::ReportPacket> &packets);
 
+    // get the value of a report field
+    template<typename ValueType>
+    bool getReportFieldValue(const std::vector<uint8_t> &message,
+                             ReportTranslateRecord* rec, ValueType* value) {
+        bool valid = true;
+        int64_t v;
+
+        v = (message[rec->byteOffset + rec->byteSize - 1] & 0x80) ? -1 : 0;
+        for (int i = static_cast<int>(rec->byteSize) - 1; i >= 0; --i) {
+            v = (v << 8) | message[rec->byteOffset + i]; // HID is little endian
+        }
+        if (v > rec->maxValue || v < rec->minValue) {
+            valid = false;
+        }
+
+        switch (rec->type) {
+            case TYPE_FLOAT:
+                *value = rec->a * (v + rec->b);
+                break;
+            case TYPE_INT64:
+                *value = v + rec->b;
+                break;
+        }
+
+        return valid;
+    }
+
     // dump data for test/debug purpose
     std::string dump() const;
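Finally, a small standalone example of the field decoding that getReportFieldValue() (and getSensorEventData()) perform: sign-extend from the most significant byte, assemble the little-endian bytes, then apply the record's scale and offset. The message bytes and scale values are hypothetical.

/* Sketch only: decodes one 2-byte little-endian signed field at byte offset 2. */
#include <stdint.h>
#include <stdio.h>

int main(void) {
    const uint8_t message[] = { 0x34, 0x12, 0xFE, 0xFF };
    const size_t byteOffset = 2, byteSize = 2;

    /* Seed with all ones if the top byte has its sign bit set. */
    int64_t v = (message[byteOffset + byteSize - 1] & 0x80) ? -1 : 0;
    for (int i = (int) byteSize - 1; i >= 0; --i) {
        v = (v << 8) | message[byteOffset + i];   /* HID reports are little endian */
    }

    /* For a TYPE_FLOAT record the value is then scaled as a * (v + b). */
    const float a = 0.01f, b = 0.0f;              /* hypothetical ReportTranslateRecord values */
    printf("raw=%lld scaled=%f\n", (long long) v, a * (v + b));   /* raw=-2 scaled=-0.020000 */
    return 0;
}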