Merge "Merge remote-tracking branch into aosp"
diff --git a/Android.mk b/Android.mk
index 111e272..db74eaf 100644
--- a/Android.mk
+++ b/Android.mk
@@ -9,6 +9,7 @@
include $(MY_LOCAL_PATH)/voice_processing/Android.mk
include $(MY_LOCAL_PATH)/mm-audio/Android.mk
include $(MY_LOCAL_PATH)/policy_hal/Android.mk
+include $(MY_LOCAL_PATH)/visualizer/Android.mk
endif
endif
diff --git a/hal/Android.mk b/hal/Android.mk
index 7d681ce..cc6b62e 100644
--- a/hal/Android.mk
+++ b/hal/Android.mk
@@ -63,11 +63,13 @@
liblog \
libcutils \
libtinyalsa \
+ libtinycompress \
libaudioroute \
libdl
LOCAL_C_INCLUDES += \
external/tinyalsa/include \
+ external/tinycompress/include \
$(call include-path-for, audio-route) \
$(call include-path-for, audio-effects) \
$(LOCAL_PATH)/$(AUDIO_PLATFORM) \
diff --git a/hal/audio_hw.c b/hal/audio_hw.c
index abda10c..1bcb25a 100644
--- a/hal/audio_hw.c
+++ b/hal/audio_hw.c
@@ -19,7 +19,12 @@
#define LOG_TAG "audio_hw_primary"
/*#define LOG_NDEBUG 0*/
-#define LOG_NDDEBUG 0
+/*#define VERY_VERY_VERBOSE_LOGGING*/
+#ifdef VERY_VERY_VERBOSE_LOGGING
+#define ALOGVV ALOGV
+#else
+#define ALOGVV(a...) do { } while(0)
+#endif
#include <errno.h>
#include <pthread.h>
@@ -27,12 +32,18 @@
#include <sys/time.h>
#include <stdlib.h>
#include <math.h>
+#include <dlfcn.h>
+#include <sys/resource.h>
+#include <sys/prctl.h>
#include <cutils/log.h>
#include <cutils/str_parms.h>
#include <cutils/properties.h>
+#include <cutils/atomic.h>
+#include <cutils/sched_policy.h>
#include <hardware/audio_effect.h>
+#include <system/thread_defs.h>
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_ns.h>
#include "audio_hw.h"
@@ -40,6 +51,14 @@
#include <platform.h>
#include "audio_extn.h"
+#include "sound/compress_params.h"
+
+#define COMPRESS_OFFLOAD_FRAGMENT_SIZE (32 * 1024)
+#define COMPRESS_OFFLOAD_NUM_FRAGMENTS 4
+/* ToDo: Check and update a proper value in msec */
+#define COMPRESS_OFFLOAD_PLAYBACK_LATENCY 96
+#define COMPRESS_PLAYBACK_VOLUME_MAX 0x2000
+
struct pcm_config pcm_config_deep_buffer = {
.channels = 2,
.rate = DEFAULT_OUTPUT_SAMPLING_RATE,
@@ -83,10 +102,12 @@
[USECASE_AUDIO_PLAYBACK_DEEP_BUFFER] = "deep-buffer-playback",
[USECASE_AUDIO_PLAYBACK_LOW_LATENCY] = "low-latency-playback",
[USECASE_AUDIO_PLAYBACK_MULTI_CH] = "multi-channel-playback",
+ [USECASE_AUDIO_PLAYBACK_OFFLOAD] = "compress-offload-playback",
[USECASE_AUDIO_RECORD] = "audio-record",
[USECASE_AUDIO_RECORD_LOW_LATENCY] = "low-latency-record",
[USECASE_AUDIO_PLAYBACK_FM] = "play-fm",
[USECASE_VOICE_CALL] = "voice-call",
+
[USECASE_VOICE2_CALL] = "voice2-call",
[USECASE_VOLTE_CALL] = "volte-call",
[USECASE_QCHAT_CALL] = "qchat-call",
@@ -112,6 +133,34 @@
static struct audio_device *adev = NULL;
static pthread_mutex_t adev_init_lock;
static bool is_adev_initialised = false;
+static int set_voice_volume_l(struct audio_device *adev, float volume);
+
+static bool is_supported_format(audio_format_t format)
+{
+ if (format == AUDIO_FORMAT_MP3 ||
+ format == AUDIO_FORMAT_AAC)
+ return true;
+
+ return false;
+}
+
+static int get_snd_codec_id(audio_format_t format)
+{
+ int id = 0;
+
+ switch (format) {
+ case AUDIO_FORMAT_MP3:
+ id = SND_AUDIOCODEC_MP3;
+ break;
+ case AUDIO_FORMAT_AAC:
+ id = SND_AUDIOCODEC_AAC;
+ break;
+ default:
+ ALOGE("%s: Unsupported audio format", __func__);
+ }
+
+ return id;
+}
static int enable_audio_route(struct audio_device *adev,
struct audio_usecase *usecase,
@@ -693,6 +742,222 @@
return ret;
}
+/* must be called with out->lock locked */
+static int send_offload_cmd_l(struct stream_out* out, int command)
+{
+ struct offload_cmd *cmd = (struct offload_cmd *)calloc(1, sizeof(struct offload_cmd));
+
+ ALOGVV("%s %d", __func__, command);
+
+ cmd->cmd = command;
+ list_add_tail(&out->offload_cmd_list, &cmd->node);
+ pthread_cond_signal(&out->offload_cond);
+ return 0;
+}
+
+/* must be called with out->lock locked */
+static void stop_compressed_output_l(struct stream_out *out)
+{
+ out->offload_state = OFFLOAD_STATE_IDLE;
+ out->playback_started = 0;
+ out->send_new_metadata = 1;
+ if (out->compr != NULL) {
+ compress_stop(out->compr);
+ while (out->offload_thread_blocked) {
+ pthread_cond_wait(&out->cond, &out->lock);
+ }
+ }
+}
+
+static void *offload_thread_loop(void *context)
+{
+ struct stream_out *out = (struct stream_out *) context;
+ struct listnode *item;
+
+ out->offload_state = OFFLOAD_STATE_IDLE;
+ out->playback_started = 0;
+
+ setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO);
+ set_sched_policy(0, SP_FOREGROUND);
+ prctl(PR_SET_NAME, (unsigned long)"Offload Callback", 0, 0, 0);
+
+ ALOGV("%s", __func__);
+ pthread_mutex_lock(&out->lock);
+ for (;;) {
+ struct offload_cmd *cmd = NULL;
+ stream_callback_event_t event;
+ bool send_callback = false;
+
+ ALOGVV("%s offload_cmd_list %d out->offload_state %d",
+ __func__, list_empty(&out->offload_cmd_list),
+ out->offload_state);
+ if (list_empty(&out->offload_cmd_list)) {
+ ALOGV("%s SLEEPING", __func__);
+ pthread_cond_wait(&out->offload_cond, &out->lock);
+ ALOGV("%s RUNNING", __func__);
+ continue;
+ }
+
+ item = list_head(&out->offload_cmd_list);
+ cmd = node_to_item(item, struct offload_cmd, node);
+ list_remove(item);
+
+ ALOGVV("%s STATE %d CMD %d out->compr %p",
+ __func__, out->offload_state, cmd->cmd, out->compr);
+
+ if (cmd->cmd == OFFLOAD_CMD_EXIT) {
+ free(cmd);
+ break;
+ }
+
+ if (out->compr == NULL) {
+ ALOGE("%s: Compress handle is NULL", __func__);
+ pthread_cond_signal(&out->cond);
+ continue;
+ }
+ out->offload_thread_blocked = true;
+ pthread_mutex_unlock(&out->lock);
+ send_callback = false;
+ switch(cmd->cmd) {
+ case OFFLOAD_CMD_WAIT_FOR_BUFFER:
+ compress_wait(out->compr, -1);
+ send_callback = true;
+ event = STREAM_CBK_EVENT_WRITE_READY;
+ break;
+ case OFFLOAD_CMD_PARTIAL_DRAIN:
+ compress_next_track(out->compr);
+ compress_partial_drain(out->compr);
+ send_callback = true;
+ event = STREAM_CBK_EVENT_DRAIN_READY;
+ break;
+ case OFFLOAD_CMD_DRAIN:
+ compress_drain(out->compr);
+ send_callback = true;
+ event = STREAM_CBK_EVENT_DRAIN_READY;
+ break;
+ default:
+ ALOGE("%s unknown command received: %d", __func__, cmd->cmd);
+ break;
+ }
+ pthread_mutex_lock(&out->lock);
+ out->offload_thread_blocked = false;
+ pthread_cond_signal(&out->cond);
+ if (send_callback) {
+ out->offload_callback(event, NULL, out->offload_cookie);
+ }
+ free(cmd);
+ }
+
+ pthread_cond_signal(&out->cond);
+ while (!list_empty(&out->offload_cmd_list)) {
+ item = list_head(&out->offload_cmd_list);
+ list_remove(item);
+ free(node_to_item(item, struct offload_cmd, node));
+ }
+ pthread_mutex_unlock(&out->lock);
+
+ return NULL;
+}
+
+static int create_offload_callback_thread(struct stream_out *out)
+{
+ pthread_cond_init(&out->offload_cond, (const pthread_condattr_t *) NULL);
+ list_init(&out->offload_cmd_list);
+ pthread_create(&out->offload_thread, (const pthread_attr_t *) NULL,
+ offload_thread_loop, out);
+ return 0;
+}
+
+static int destroy_offload_callback_thread(struct stream_out *out)
+{
+ pthread_mutex_lock(&out->lock);
+ stop_compressed_output_l(out);
+ send_offload_cmd_l(out, OFFLOAD_CMD_EXIT);
+
+ pthread_mutex_unlock(&out->lock);
+ pthread_join(out->offload_thread, (void **) NULL);
+ pthread_cond_destroy(&out->offload_cond);
+
+ return 0;
+}
+
+static bool allow_hdmi_channel_config(struct audio_device *adev)
+{
+ struct listnode *node;
+ struct audio_usecase *usecase;
+ bool ret = true;
+
+ list_for_each(node, &adev->usecase_list) {
+ usecase = node_to_item(node, struct audio_usecase, list);
+ if (usecase->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL) {
+ /*
+ * If a voice call is already active, do not proceed further to avoid
+ * disabling/enabling both RX and TX devices, CSD calls, etc.
+ * Once the voice call is done, the HDMI channels can be configured to
+ * the max channels of the remaining use cases.
+ */
+ if (usecase->id == USECASE_VOICE_CALL) {
+ ALOGD("%s: voice call is active, no change in HDMI channels",
+ __func__);
+ ret = false;
+ break;
+ } else if (usecase->id == USECASE_AUDIO_PLAYBACK_MULTI_CH) {
+ ALOGD("%s: multi channel playback is active, "
+ "no change in HDMI channels", __func__);
+ ret = false;
+ break;
+ }
+ }
+ }
+ return ret;
+}
+
+static int check_and_set_hdmi_channels(struct audio_device *adev,
+ unsigned int channels)
+{
+ struct listnode *node;
+ struct audio_usecase *usecase;
+
+ /* Check if change in HDMI channel config is allowed */
+ if (!allow_hdmi_channel_config(adev))
+ return 0;
+
+ if (channels == adev->cur_hdmi_channels) {
+ ALOGD("%s: Requested channels are same as current", __func__);
+ return 0;
+ }
+
+ platform_set_hdmi_channels(adev->platform, channels);
+ adev->cur_hdmi_channels = channels;
+
+ /*
+ * De-route all the playback streams routed to HDMI so that
+ * the back end is deactivated. Note that the back end will not
+ * be deactivated while any stream is still connected to it.
+ */
+ list_for_each(node, &adev->usecase_list) {
+ usecase = node_to_item(node, struct audio_usecase, list);
+ if (usecase->type == PCM_PLAYBACK &&
+ usecase->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL) {
+ disable_audio_route(adev, usecase, true);
+ }
+ }
+
+ /*
+ * Enable all the streams disabled above. Now the HDMI back end
+ * will be activated with the new channel configuration.
+ */
+ list_for_each(node, &adev->usecase_list) {
+ usecase = node_to_item(node, struct audio_usecase, list);
+ if (usecase->type == PCM_PLAYBACK &&
+ usecase->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL) {
+ enable_audio_route(adev, usecase, true);
+ }
+ }
+
+ return 0;
+}
+
static int stop_output_stream(struct stream_out *out)
{
int i, ret = 0;
@@ -708,6 +973,10 @@
return -EINVAL;
}
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD &&
+ adev->visualizer_stop_output != NULL)
+ adev->visualizer_stop_output(out->handle);
+
/* 1. Get and set stream specific mixer controls */
disable_audio_route(adev, uc_info, true);
@@ -717,6 +986,10 @@
list_remove(&uc_info->list);
free(uc_info);
+ /* Must be called after removing the usecase from list */
+ if (out->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL)
+ check_and_set_hdmi_channels(adev, DEFAULT_HDMI_OUT_CHANNELS);
+
ALOGV("%s: exit: status(%d)", __func__, ret);
return ret;
}
@@ -745,24 +1018,46 @@
uc_info->in_snd_device = SND_DEVICE_NONE;
uc_info->out_snd_device = SND_DEVICE_NONE;
+ /* This must be called before adding this usecase to the list */
+ if (out->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL)
+ check_and_set_hdmi_channels(adev, out->config.channels);
+
list_add_tail(&adev->usecase_list, &uc_info->list);
select_devices(adev, out->usecase);
ALOGV("%s: Opening PCM device card_id(%d) device_id(%d)",
__func__, 0, out->pcm_device_id);
- out->pcm = pcm_open(SOUND_CARD, out->pcm_device_id,
- PCM_OUT, &out->config);
- if (out->pcm && !pcm_is_ready(out->pcm)) {
- ALOGE("%s: %s", __func__, pcm_get_error(out->pcm));
- pcm_close(out->pcm);
+ if (out->usecase != USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ out->pcm = pcm_open(SOUND_CARD, out->pcm_device_id,
+ PCM_OUT | PCM_MONOTONIC, &out->config);
+ if (out->pcm && !pcm_is_ready(out->pcm)) {
+ ALOGE("%s: %s", __func__, pcm_get_error(out->pcm));
+ pcm_close(out->pcm);
+ out->pcm = NULL;
+ ret = -EIO;
+ goto error_open;
+ }
+ } else {
out->pcm = NULL;
- ret = -EIO;
- goto error_pcm_open;
+ out->compr = compress_open(SOUND_CARD, out->pcm_device_id,
+ COMPRESS_IN, &out->compr_config);
+ if (out->compr && !is_compress_ready(out->compr)) {
+ ALOGE("%s: %s", __func__, compress_get_error(out->compr));
+ compress_close(out->compr);
+ out->compr = NULL;
+ ret = -EIO;
+ goto error_open;
+ }
+ if (out->offload_callback)
+ compress_nonblock(out->compr, out->non_blocking);
+
+ if (adev->visualizer_start_output != NULL)
+ adev->visualizer_start_output(out->handle);
}
ALOGV("%s: exit", __func__);
return 0;
-error_pcm_open:
+error_open:
stop_output_stream(out);
error_config:
return ret;
@@ -819,7 +1114,7 @@
{
struct stream_out *out = (struct stream_out *)stream;
- return out->config.rate;
+ return out->sample_rate;
}
static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate)
@@ -831,6 +1126,10 @@
{
struct stream_out *out = (struct stream_out *)stream;
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ return out->compr_config.fragment_size;
+ }
+
return out->config.period_size * audio_stream_frame_size(stream);
}
@@ -843,7 +1142,9 @@
static audio_format_t out_get_format(const struct audio_stream *stream)
{
- return AUDIO_FORMAT_PCM_16_BIT;
+ struct stream_out *out = (struct stream_out *)stream;
+
+ return out->format;
}
static int out_set_format(struct audio_stream *stream, audio_format_t format)
@@ -855,15 +1156,26 @@
{
struct stream_out *out = (struct stream_out *)stream;
struct audio_device *adev = out->dev;
+
ALOGV("%s: enter: usecase(%d: %s)", __func__,
out->usecase, use_case_table[out->usecase]);
- pthread_mutex_lock(&out->lock);
+ pthread_mutex_lock(&out->lock);
if (!out->standby) {
out->standby = true;
- if (out->pcm) {
- pcm_close(out->pcm);
- out->pcm = NULL;
+ if (out->usecase != USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ if (out->pcm) {
+ pcm_close(out->pcm);
+ out->pcm = NULL;
+ }
+ } else {
+ stop_compressed_output_l(out);
+ out->gapless_mdata.encoder_delay = 0;
+ out->gapless_mdata.encoder_padding = 0;
+ if (out->compr != NULL) {
+ compress_close(out->compr);
+ out->compr = NULL;
+ }
}
pthread_mutex_lock(&adev->lock);
stop_output_stream(out);
@@ -879,6 +1191,39 @@
return 0;
}
+static int parse_compress_metadata(struct stream_out *out, struct str_parms *parms)
+{
+ int ret = 0;
+ char value[32];
+ struct compr_gapless_mdata tmp_mdata;
+
+ if (!out || !parms) {
+ return -EINVAL;
+ }
+
+ ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES, value, sizeof(value));
+ if (ret >= 0) {
+ tmp_mdata.encoder_delay = atoi(value); // what's a good limit check?
+ } else {
+ return -EINVAL;
+ }
+
+ ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES, value, sizeof(value));
+ if (ret >= 0) {
+ tmp_mdata.encoder_padding = atoi(value);
+ } else {
+ return -EINVAL;
+ }
+
+ out->gapless_mdata = tmp_mdata;
+ out->send_new_metadata = 1;
+ ALOGV("%s new encoder delay %u and padding %u", __func__,
+ out->gapless_mdata.encoder_delay, out->gapless_mdata.encoder_padding);
+
+ return 0;
+}
+
+
static int out_set_parameters(struct audio_stream *stream, const char *kvpairs)
{
struct stream_out *out = (struct stream_out *)stream;
@@ -961,6 +1306,9 @@
audio_extn_set_parameters(adev, parms);
pthread_mutex_unlock(&adev->lock);
}
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ parse_compress_metadata(out, parms);
+ }
str_parms_destroy(parms);
ALOGV("%s: exit: code(%d)", __func__, ret);
@@ -1010,18 +1358,40 @@
{
struct stream_out *out = (struct stream_out *)stream;
- return (out->config.period_count * out->config.period_size * 1000) / (out->config.rate);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD)
+ return COMPRESS_OFFLOAD_PLAYBACK_LATENCY;
+
+ return (out->config.period_count * out->config.period_size * 1000) /
+ (out->config.rate);
}
static int out_set_volume(struct audio_stream_out *stream, float left,
float right)
{
struct stream_out *out = (struct stream_out *)stream;
+ int volume[2];
+
if (out->usecase == USECASE_AUDIO_PLAYBACK_MULTI_CH) {
/* only take left channel into account: the API is for stereo anyway */
out->muted = (left == 0.0f);
return 0;
+ } else if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ const char *mixer_ctl_name = "Compress Playback Volume";
+ struct audio_device *adev = out->dev;
+ struct mixer_ctl *ctl;
+
+ ctl = mixer_get_ctl_by_name(adev->mixer, mixer_ctl_name);
+ if (!ctl) {
+ ALOGE("%s: Could not get ctl for mixer cmd - %s",
+ __func__, mixer_ctl_name);
+ return -EINVAL;
+ }
+ volume[0] = (int)(left * COMPRESS_PLAYBACK_VOLUME_MAX);
+ volume[1] = (int)(right * COMPRESS_PLAYBACK_VOLUME_MAX);
+ mixer_ctl_set_array(ctl, volume, sizeof(volume)/sizeof(volume[0]));
+ return 0;
}
+
return -ENOSYS;
}
@@ -1030,7 +1400,7 @@
{
struct stream_out *out = (struct stream_out *)stream;
struct audio_device *adev = out->dev;
- int i, ret = -1;
+ ssize_t ret = 0;
pthread_mutex_lock(&out->lock);
if (out->standby) {
@@ -1038,17 +1408,42 @@
pthread_mutex_lock(&adev->lock);
ret = start_output_stream(out);
pthread_mutex_unlock(&adev->lock);
+ /* ToDo: If the use case is compress offload, this should return 0 */
if (ret != 0) {
out->standby = true;
goto exit;
}
}
- if (out->pcm) {
- if (out->muted)
- memset((void *)buffer, 0, bytes);
- //ALOGV("%s: writing buffer (%d bytes) to pcm device", __func__, bytes);
- ret = pcm_write(out->pcm, (void *)buffer, bytes);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ ALOGVV("%s: writing buffer (%d bytes) to compress device", __func__, bytes);
+ if (out->send_new_metadata) {
+ ALOGVV("send new gapless metadata");
+ compress_set_gapless_metadata(out->compr, &out->gapless_mdata);
+ out->send_new_metadata = 0;
+ }
+
+ ret = compress_write(out->compr, buffer, bytes);
+ ALOGVV("%s: writing buffer (%d bytes) to compress device returned %d", __func__, bytes, ret);
+ if (ret >= 0 && ret < (ssize_t)bytes) {
+ send_offload_cmd_l(out, OFFLOAD_CMD_WAIT_FOR_BUFFER);
+ }
+ if (!out->playback_started) {
+ compress_start(out->compr);
+ out->playback_started = 1;
+ out->offload_state = OFFLOAD_STATE_PLAYING;
+ }
+ pthread_mutex_unlock(&out->lock);
+ return ret;
+ } else {
+ if (out->pcm) {
+ if (out->muted)
+ memset((void *)buffer, 0, bytes);
+ ALOGVV("%s: writing buffer (%d bytes) to pcm device", __func__, bytes);
+ ret = pcm_write(out->pcm, (void *)buffer, bytes);
+ if (ret == 0)
+ out->written += bytes / (out->config.channels * sizeof(short));
+ }
}
exit:
@@ -1067,7 +1462,20 @@
static int out_get_render_position(const struct audio_stream_out *stream,
uint32_t *dsp_frames)
{
- return -EINVAL;
+ struct stream_out *out = (struct stream_out *)stream;
+ *dsp_frames = 0;
+ if ((out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) && (dsp_frames != NULL)) {
+ pthread_mutex_lock(&out->lock);
+ if (out->compr != NULL) {
+ compress_get_tstamp(out->compr, (unsigned long *)dsp_frames,
+ &out->sample_rate);
+ ALOGVV("%s rendered frames %d sample_rate %d",
+ __func__, *dsp_frames, out->sample_rate);
+ }
+ pthread_mutex_unlock(&out->lock);
+ return 0;
+ } else
+ return -EINVAL;
}
static int out_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect)
@@ -1086,6 +1494,126 @@
return -EINVAL;
}
+static int out_get_presentation_position(const struct audio_stream_out *stream,
+ uint64_t *frames, struct timespec *timestamp)
+{
+ struct stream_out *out = (struct stream_out *)stream;
+ int ret = -1;
+ unsigned long dsp_frames;
+
+ pthread_mutex_lock(&out->lock);
+
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ if (out->compr != NULL) {
+ compress_get_tstamp(out->compr, &dsp_frames,
+ &out->sample_rate);
+ ALOGVV("%s rendered frames %ld sample_rate %d",
+ __func__, dsp_frames, out->sample_rate);
+ *frames = dsp_frames;
+ ret = 0;
+ /* this is the best we can do */
+ clock_gettime(CLOCK_MONOTONIC, timestamp);
+ }
+ } else {
+ if (out->pcm) {
+ size_t avail;
+ if (pcm_get_htimestamp(out->pcm, &avail, timestamp) == 0) {
+ size_t kernel_buffer_size = out->config.period_size * out->config.period_count;
+ int64_t signed_frames = out->written - kernel_buffer_size + avail;
+ // This adjustment accounts for buffering after app processor.
+ // It is based on estimated DSP latency per use case, rather than exact.
+ signed_frames -=
+ (platform_render_latency(out->usecase) * out->sample_rate / 1000000LL);
+
+ // It would be unusual for this value to be negative, but check just in case ...
+ if (signed_frames >= 0) {
+ *frames = signed_frames;
+ ret = 0;
+ }
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&out->lock);
+
+ return ret;
+}
+
+static int out_set_callback(struct audio_stream_out *stream,
+ stream_callback_t callback, void *cookie)
+{
+ struct stream_out *out = (struct stream_out *)stream;
+
+ ALOGV("%s", __func__);
+ pthread_mutex_lock(&out->lock);
+ out->offload_callback = callback;
+ out->offload_cookie = cookie;
+ pthread_mutex_unlock(&out->lock);
+ return 0;
+}
+
+static int out_pause(struct audio_stream_out* stream)
+{
+ struct stream_out *out = (struct stream_out *)stream;
+ int status = -ENOSYS;
+ ALOGV("%s", __func__);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ pthread_mutex_lock(&out->lock);
+ if (out->compr != NULL && out->offload_state == OFFLOAD_STATE_PLAYING) {
+ status = compress_pause(out->compr);
+ out->offload_state = OFFLOAD_STATE_PAUSED;
+ }
+ pthread_mutex_unlock(&out->lock);
+ }
+ return status;
+}
+
+static int out_resume(struct audio_stream_out* stream)
+{
+ struct stream_out *out = (struct stream_out *)stream;
+ int status = -ENOSYS;
+ ALOGV("%s", __func__);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ status = 0;
+ pthread_mutex_lock(&out->lock);
+ if (out->compr != NULL && out->offload_state == OFFLOAD_STATE_PAUSED) {
+ status = compress_resume(out->compr);
+ out->offload_state = OFFLOAD_STATE_PLAYING;
+ }
+ pthread_mutex_unlock(&out->lock);
+ }
+ return status;
+}
+
+static int out_drain(struct audio_stream_out* stream, audio_drain_type_t type )
+{
+ struct stream_out *out = (struct stream_out *)stream;
+ int status = -ENOSYS;
+ ALOGV("%s", __func__);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ pthread_mutex_lock(&out->lock);
+ if (type == AUDIO_DRAIN_EARLY_NOTIFY)
+ status = send_offload_cmd_l(out, OFFLOAD_CMD_PARTIAL_DRAIN);
+ else
+ status = send_offload_cmd_l(out, OFFLOAD_CMD_DRAIN);
+ pthread_mutex_unlock(&out->lock);
+ }
+ return status;
+}
+
+static int out_flush(struct audio_stream_out* stream)
+{
+ struct stream_out *out = (struct stream_out *)stream;
+ ALOGV("%s", __func__);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ pthread_mutex_lock(&out->lock);
+ stop_compressed_output_l(out);
+ pthread_mutex_unlock(&out->lock);
+ return 0;
+ }
+ return -ENOSYS;
+}
+
/** audio_stream_in implementation **/
static uint32_t in_get_sample_rate(const struct audio_stream *stream)
{
@@ -1313,11 +1841,14 @@
if (devices == AUDIO_DEVICE_NONE)
devices = AUDIO_DEVICE_OUT_SPEAKER;
- out->supported_channel_masks[0] = AUDIO_CHANNEL_OUT_STEREO;
- out->channel_mask = AUDIO_CHANNEL_OUT_STEREO;
out->flags = flags;
out->devices = devices;
out->dev = adev;
+ out->format = config->format;
+ out->sample_rate = config->sample_rate;
+ out->channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+ out->supported_channel_masks[0] = AUDIO_CHANNEL_OUT_STEREO;
+ out->handle = handle;
/* Init use case and pcm_config */
if (out->flags & AUDIO_OUTPUT_FLAG_DIRECT &&
@@ -1325,12 +1856,8 @@
pthread_mutex_lock(&adev->lock);
ret = read_hdmi_channel_masks(out);
pthread_mutex_unlock(&adev->lock);
- if (ret != 0) {
- /* If HDMI does not support multi channel playback, set the default */
- out->config.channels = popcount(out->channel_mask);
- platform_set_hdmi_channels(adev->platform, out->config.channels);
+ if (ret != 0)
goto error_open;
- }
if (config->sample_rate == 0)
config->sample_rate = DEFAULT_OUTPUT_SAMPLING_RATE;
@@ -1338,18 +1865,70 @@
config->channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
out->channel_mask = config->channel_mask;
+ out->sample_rate = config->sample_rate;
out->usecase = USECASE_AUDIO_PLAYBACK_MULTI_CH;
out->config = pcm_config_hdmi_multi;
out->config.rate = config->sample_rate;
out->config.channels = popcount(out->channel_mask);
out->config.period_size = HDMI_MULTI_PERIOD_BYTES / (out->config.channels * 2);
- platform_set_hdmi_channels(adev->platform, out->config.channels);
} else if (out->flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) {
out->usecase = USECASE_AUDIO_PLAYBACK_DEEP_BUFFER;
out->config = pcm_config_deep_buffer;
+ out->sample_rate = out->config.rate;
+ } else if (out->flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+ if (config->offload_info.version != AUDIO_INFO_INITIALIZER.version ||
+ config->offload_info.size != AUDIO_INFO_INITIALIZER.size) {
+ ALOGE("%s: Unsupported Offload information", __func__);
+ ret = -EINVAL;
+ goto error_open;
+ }
+ if (!is_supported_format(config->offload_info.format)) {
+ ALOGE("%s: Unsupported audio format", __func__);
+ ret = -EINVAL;
+ goto error_open;
+ }
+
+ out->compr_config.codec = (struct snd_codec *)
+ calloc(1, sizeof(struct snd_codec));
+
+ out->usecase = USECASE_AUDIO_PLAYBACK_OFFLOAD;
+ if (config->offload_info.channel_mask)
+ out->channel_mask = config->offload_info.channel_mask;
+ else if (config->channel_mask)
+ out->channel_mask = config->channel_mask;
+ out->format = config->offload_info.format;
+ out->sample_rate = config->offload_info.sample_rate;
+
+ out->stream.set_callback = out_set_callback;
+ out->stream.pause = out_pause;
+ out->stream.resume = out_resume;
+ out->stream.drain = out_drain;
+ out->stream.flush = out_flush;
+
+ out->compr_config.codec->id =
+ get_snd_codec_id(config->offload_info.format);
+ out->compr_config.fragment_size = COMPRESS_OFFLOAD_FRAGMENT_SIZE;
+ out->compr_config.fragments = COMPRESS_OFFLOAD_NUM_FRAGMENTS;
+ out->compr_config.codec->sample_rate =
+ compress_get_alsa_rate(config->offload_info.sample_rate);
+ out->compr_config.codec->bit_rate =
+ config->offload_info.bit_rate;
+ out->compr_config.codec->ch_in =
+ popcount(config->channel_mask);
+ out->compr_config.codec->ch_out = out->compr_config.codec->ch_in;
+
+ if (flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING)
+ out->non_blocking = 1;
+
+ out->send_new_metadata = 1;
+ create_offload_callback_thread(out);
+ ALOGV("%s: offloaded output offload_info version %04x bit rate %d",
+ __func__, config->offload_info.version,
+ config->offload_info.bit_rate);
} else {
out->usecase = USECASE_AUDIO_PLAYBACK_LOW_LATENCY;
out->config = pcm_config_low_latency;
+ out->sample_rate = out->config.rate;
}
if (flags & AUDIO_OUTPUT_FLAG_PRIMARY) {
@@ -1389,9 +1968,14 @@
out->stream.write = out_write;
out->stream.get_render_position = out_get_render_position;
out->stream.get_next_write_timestamp = out_get_next_write_timestamp;
+ out->stream.get_presentation_position = out_get_presentation_position;
out->standby = 1;
/* out->muted = false; by calloc() */
+ /* out->written = 0; by calloc() */
+
+ pthread_mutex_init(&out->lock, (const pthread_mutexattr_t *) NULL);
+ pthread_cond_init(&out->cond, (const pthread_condattr_t *) NULL);
config->format = out->stream.common.get_format(&out->stream.common);
config->channel_mask = out->stream.common.get_channels(&out->stream.common);
@@ -1411,8 +1995,19 @@
static void adev_close_output_stream(struct audio_hw_device *dev,
struct audio_stream_out *stream)
{
+ struct stream_out *out = (struct stream_out *)stream;
+ struct audio_device *adev = out->dev;
+
ALOGV("%s: enter", __func__);
out_standby(&stream->common);
+ if (out->usecase == USECASE_AUDIO_PLAYBACK_OFFLOAD) {
+ destroy_offload_callback_thread(out);
+
+ if (out->compr_config.codec != NULL)
+ free(out->compr_config.codec);
+ }
+ pthread_cond_destroy(&out->cond);
+ pthread_mutex_destroy(&out->lock);
free(stream);
ALOGV("%s: exit", __func__);
}
@@ -1516,7 +2111,13 @@
static int adev_set_voice_volume(struct audio_hw_device *dev, float volume)
{
- return voice_set_volume((struct audio_device *)dev, volume);
+ int ret;
+ struct audio_device *adev = (struct audio_device *)dev;
+ pthread_mutex_lock(&adev->lock);
+ /* cache volume */
+ ret = voice_set_volume(adev, volume);
+ pthread_mutex_unlock(&adev->lock);
+ return ret;
}
static int adev_set_master_volume(struct audio_hw_device *dev, float volume)
@@ -1716,6 +2317,7 @@
adev->out_device = AUDIO_DEVICE_NONE;
adev->bluetooth_nrec = true;
adev->acdb_settings = TTY_MODE_OFF;
+ /* adev->cur_hdmi_channels = 0; by calloc() */
adev->snd_dev_ref_cnt = calloc(SND_DEVICE_MAX, sizeof(int));
voice_init(adev);
list_init(&adev->usecase_list);
@@ -1729,6 +2331,22 @@
*device = NULL;
return -EINVAL;
}
+
+ if (access(VISUALIZER_LIBRARY_PATH, R_OK) == 0) {
+ adev->visualizer_lib = dlopen(VISUALIZER_LIBRARY_PATH, RTLD_NOW);
+ if (adev->visualizer_lib == NULL) {
+ ALOGE("%s: DLOPEN failed for %s", __func__, VISUALIZER_LIBRARY_PATH);
+ } else {
+ ALOGV("%s: DLOPEN successful for %s", __func__, VISUALIZER_LIBRARY_PATH);
+ adev->visualizer_start_output =
+ (int (*)(audio_io_handle_t))dlsym(adev->visualizer_lib,
+ "visualizer_hal_start_output");
+ adev->visualizer_stop_output =
+ (int (*)(audio_io_handle_t))dlsym(adev->visualizer_lib,
+ "visualizer_hal_stop_output");
+ }
+ }
+
*device = &adev->device.common;
/* update init flag*/
diff --git a/hal/audio_hw.h b/hal/audio_hw.h
index 9b397b4..c43b557 100644
--- a/hal/audio_hw.h
+++ b/hal/audio_hw.h
@@ -23,10 +23,13 @@
#include <cutils/list.h>
#include <hardware/audio.h>
#include <tinyalsa/asoundlib.h>
+#include <tinycompress/tinycompress.h>
#include <audio_route/audio_route.h>
#include "voice.h"
+#define VISUALIZER_LIBRARY_PATH "/system/lib/soundfx/libqcomvisualizer.so"
+
/* Flags used to initialize acdb_settings variable that goes to ACDB library */
#define DMIC_FLAG 0x00000002
#define QMIC_FLAG 0x00000004
@@ -40,6 +43,7 @@
#define ACDB_DEV_TYPE_IN 2
#define MAX_SUPPORTED_CHANNEL_MASKS 2
+#define DEFAULT_HDMI_OUT_CHANNELS 2
typedef int snd_device_t;
@@ -53,6 +57,8 @@
USECASE_AUDIO_PLAYBACK_DEEP_BUFFER = 0,
USECASE_AUDIO_PLAYBACK_LOW_LATENCY,
USECASE_AUDIO_PLAYBACK_MULTI_CH,
+ USECASE_AUDIO_PLAYBACK_OFFLOAD,
+
/* FM usecase */
USECASE_AUDIO_PLAYBACK_FM,
@@ -86,20 +92,59 @@
* the buffer size of an input/output stream
*/
+enum {
+ OFFLOAD_CMD_EXIT, /* exit compress offload thread loop*/
+ OFFLOAD_CMD_DRAIN, /* send a full drain request to DSP */
+ OFFLOAD_CMD_PARTIAL_DRAIN, /* send a partial drain request to DSP */
+ OFFLOAD_CMD_WAIT_FOR_BUFFER, /* wait for buffer released by DSP */
+};
+
+enum {
+ OFFLOAD_STATE_IDLE,
+ OFFLOAD_STATE_PLAYING,
+ OFFLOAD_STATE_PAUSED,
+};
+
+struct offload_cmd {
+ struct listnode node;
+ int cmd;
+ int data[];
+};
+
struct stream_out {
struct audio_stream_out stream;
pthread_mutex_t lock; /* see note below on mutex acquisition order */
+ pthread_cond_t cond;
struct pcm_config config;
+ struct compr_config compr_config;
struct pcm *pcm;
+ struct compress *compr;
int standby;
int pcm_device_id;
+ unsigned int sample_rate;
audio_channel_mask_t channel_mask;
+ audio_format_t format;
audio_devices_t devices;
audio_output_flags_t flags;
audio_usecase_t usecase;
/* Array of supported channel mask configurations. +1 so that the last entry is always 0 */
audio_channel_mask_t supported_channel_masks[MAX_SUPPORTED_CHANNEL_MASKS + 1];
bool muted;
+ uint64_t written; /* total frames written, not cleared when entering standby */
+ audio_io_handle_t handle;
+
+ int non_blocking;
+ int playback_started;
+ int offload_state;
+ pthread_cond_t offload_cond;
+ pthread_t offload_thread;
+ struct listnode offload_cmd_list;
+ bool offload_thread_blocked;
+
+ stream_callback_t offload_callback;
+ void *offload_cookie;
+ struct compr_gapless_mdata gapless_mdata;
+ int send_new_metadata;
struct audio_device *dev;
};
@@ -157,7 +202,13 @@
int acdb_settings;
bool speaker_lr_swap;
struct voice voice;
+ unsigned int cur_hdmi_channels;
+
void *platform;
+
+ void *visualizer_lib;
+ int (*visualizer_start_output)(audio_io_handle_t);
+ int (*visualizer_stop_output)(audio_io_handle_t);
};
int select_devices(struct audio_device *adev,
diff --git a/hal/msm8960/platform.c b/hal/msm8960/platform.c
index 78d06c5..ac36f87 100644
--- a/hal/msm8960/platform.c
+++ b/hal/msm8960/platform.c
@@ -192,6 +192,9 @@
[SND_DEVICE_IN_VOICE_REC_DMIC_FLUENCE] = 6,
};
+#define DEEP_BUFFER_PLATFORM_DELAY (29*1000LL)
+#define LOW_LATENCY_PLATFORM_DELAY (13*1000LL)
+
static pthread_once_t check_op_once_ctl = PTHREAD_ONCE_INIT;
static bool is_tmus = false;
@@ -889,3 +892,16 @@
ALOGE("%s: Not implemented", __func__);
return -ENOSYS;
}
+
+/* Delay in Us */
+int64_t platform_render_latency(audio_usecase_t usecase)
+{
+ switch (usecase) {
+ case USECASE_AUDIO_PLAYBACK_DEEP_BUFFER:
+ return DEEP_BUFFER_PLATFORM_DELAY;
+ case USECASE_AUDIO_PLAYBACK_LOW_LATENCY:
+ return LOW_LATENCY_PLATFORM_DELAY;
+ default:
+ return 0;
+ }
+}
diff --git a/hal/msm8974/platform.c b/hal/msm8974/platform.c
index d2626af..134fdbb 100644
--- a/hal/msm8974/platform.c
+++ b/hal/msm8974/platform.c
@@ -236,6 +236,9 @@
[SND_DEVICE_IN_VOICE_REC_DMIC_FLUENCE] = 6,
};
+#define DEEP_BUFFER_PLATFORM_DELAY (29*1000LL)
+#define LOW_LATENCY_PLATFORM_DELAY (13*1000LL)
+
static pthread_once_t check_op_once_ctl = PTHREAD_ONCE_INIT;
static bool is_tmus = false;
@@ -251,6 +254,19 @@
case 310490:
case 310260:
case 310026:
+ /* Add new TMUS MNC(800, 660, 580, 310, 270, 250, 240, 230, 220, 210, 200, 160) */
+ case 310800:
+ case 310660:
+ case 310580:
+ case 310310:
+ case 310270:
+ case 310250:
+ case 310240:
+ case 310230:
+ case 310220:
+ case 310210:
+ case 310200:
+ case 310160:
is_tmus = true;
break;
}
@@ -1065,3 +1081,15 @@
ALOGV("%s: exit: returns - %s", __func__, str_parms_to_str(reply));
}
+/* Delay in Us */
+int64_t platform_render_latency(audio_usecase_t usecase)
+{
+ switch (usecase) {
+ case USECASE_AUDIO_PLAYBACK_DEEP_BUFFER:
+ return DEEP_BUFFER_PLATFORM_DELAY;
+ case USECASE_AUDIO_PLAYBACK_LOW_LATENCY:
+ return LOW_LATENCY_PLATFORM_DELAY;
+ default:
+ return 0;
+ }
+}
diff --git a/hal/platform_api.h b/hal/platform_api.h
index 39c94e8..04049f4 100644
--- a/hal/platform_api.h
+++ b/hal/platform_api.h
@@ -43,4 +43,7 @@
int platform_set_parameters(void *platform, struct str_parms *parms);
int platform_set_incall_recoding_session_id(void *platform, uint32_t session_id);
+/* returns the latency for a usecase in Us */
+int64_t platform_render_latency(audio_usecase_t usecase);
+
#endif // QCOM_AUDIO_PLATFORM_API_H
diff --git a/hal/voice.c b/hal/voice.c
old mode 100644
new mode 100755
index 7b40d1a..190df7c
--- a/hal/voice.c
+++ b/hal/voice.c
@@ -170,6 +170,8 @@
pcm_start(session->pcm_rx);
pcm_start(session->pcm_tx);
+ voice_set_volume(adev, adev->voice.volume);
+
ret = platform_start_voice_call(adev->platform);
if (ret < 0) {
ALOGE("%s: platform_start_voice_call error %d\n", __func__, ret);
@@ -271,7 +273,7 @@
{
int vol, err = 0;
- pthread_mutex_lock(&adev->lock);
+ adev->voice.volume = volume;
if (adev->mode == AUDIO_MODE_IN_CALL) {
if (volume < 0.0) {
volume = 0.0;
@@ -287,11 +289,8 @@
vol = 100 - vol;
err = platform_set_voice_volume(adev->platform, vol);
- if (!err) {
- adev->voice.volume = volume;
- }
}
- pthread_mutex_unlock(&adev->lock);
+
return err;
}
diff --git a/visualizer/Android.mk b/visualizer/Android.mk
new file mode 100644
index 0000000..3c92044
--- /dev/null
+++ b/visualizer/Android.mk
@@ -0,0 +1,36 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ offload_visualizer.c
+
+LOCAL_CFLAGS+= -O2 -fvisibility=hidden
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ liblog \
+ libtinyalsa
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx
+LOCAL_MODULE:= libqcomvisualizer
+
+LOCAL_C_INCLUDES := \
+ external/tinyalsa/include \
+ $(call include-path-for, audio-effects)
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/visualizer/MODULE_LICENSE_APACHE2 b/visualizer/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/visualizer/MODULE_LICENSE_APACHE2
diff --git a/visualizer/NOTICE b/visualizer/NOTICE
new file mode 100644
index 0000000..ad6ed94
--- /dev/null
+++ b/visualizer/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2013, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/visualizer/offload_visualizer.c b/visualizer/offload_visualizer.c
new file mode 100644
index 0000000..eb43558
--- /dev/null
+++ b/visualizer/offload_visualizer.c
@@ -0,0 +1,1241 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "offload_visualizer"
+/*#define LOG_NDEBUG 0*/
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <sys/prctl.h>
+
+#include <cutils/list.h>
+#include <cutils/log.h>
+#include <system/thread_defs.h>
+#include <tinyalsa/asoundlib.h>
+#include <audio_effects/effect_visualizer.h>
+
+
+enum {
+ EFFECT_STATE_UNINITIALIZED,
+ EFFECT_STATE_INITIALIZED,
+ EFFECT_STATE_ACTIVE,
+};
+
+typedef struct effect_context_s effect_context_t;
+
+/* effect specific operations. Only the init() and process() operations must be defined.
+ * Others are optional.
+ */
+typedef struct effect_ops_s {
+ int (*init)(effect_context_t *context);
+ int (*release)(effect_context_t *context);
+ int (*reset)(effect_context_t *context);
+ int (*enable)(effect_context_t *context);
+ int (*disable)(effect_context_t *context);
+ int (*process)(effect_context_t *context, audio_buffer_t *in, audio_buffer_t *out);
+ int (*set_parameter)(effect_context_t *context, effect_param_t *param, uint32_t size);
+ int (*get_parameter)(effect_context_t *context, effect_param_t *param, uint32_t *size);
+ int (*command)(effect_context_t *context, uint32_t cmdCode, uint32_t cmdSize,
+ void *pCmdData, uint32_t *replySize, void *pReplyData);
+} effect_ops_t;
+
+struct effect_context_s {
+ const struct effect_interface_s *itfe;
+ struct listnode effects_list_node; /* node in created_effects_list */
+ struct listnode output_node; /* node in output_context_t.effects_list */
+ effect_config_t config;
+ const effect_descriptor_t *desc;
+ audio_io_handle_t out_handle; /* io handle of the output the effect is attached to */
+ uint32_t state;
+ bool offload_enabled; /* when offload is enabled we process the VISUALIZER_CMD_CAPTURE command.
+ Otherwise the non-offloaded visualizer has already processed the command
+ and we must not overwrite the reply. */
+ effect_ops_t ops;
+};
+
+typedef struct output_context_s {
+ struct listnode outputs_list_node; /* node in active_outputs_list */
+ audio_io_handle_t handle; /* io handle */
+ struct listnode effects_list; /* list of effects attached to this output */
+} output_context_t;
+
+
+/* maximum time since last capture buffer update before resetting capture buffer. This means
+ that the framework has stopped playing audio and we must start returning silence */
+#define MAX_STALL_TIME_MS 1000
+
+#define CAPTURE_BUF_SIZE 65536 /* "64k should be enough for everyone" */
+
+#define DISCARD_MEASUREMENTS_TIME_MS 2000 /* discard measurements older than this number of ms */
+
+/* maximum number of buffers for which we keep track of the measurements */
+#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 /* note: buffer index is stored in uint8_t */
+
+typedef struct buffer_stats_s {
+ bool is_valid;
+ uint16_t peak_u16; /* the positive peak of the absolute value of the samples in a buffer */
+ float rms_squared; /* the average square of the samples in a buffer */
+} buffer_stats_t;
+
+typedef struct visualizer_context_s {
+ effect_context_t common;
+
+ uint32_t capture_idx;
+ uint32_t capture_size;
+ uint32_t scaling_mode;
+ uint32_t last_capture_idx;
+ uint32_t latency;
+ struct timespec buffer_update_time;
+ uint8_t capture_buf[CAPTURE_BUF_SIZE];
+ /* for measurements */
+ uint8_t channel_count; /* to avoid recomputing it every time a buffer is processed */
+ uint32_t meas_mode;
+ uint8_t meas_wndw_size_in_buffers;
+ uint8_t meas_buffer_idx;
+ buffer_stats_t past_meas[MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS];
+} visualizer_context_t;
+
+
+extern const struct effect_interface_s effect_interface;
+
+/* Offload visualizer UUID: 7a8044a0-1a71-11e3-a184-0002a5d5c51b */
+const effect_descriptor_t visualizer_descriptor = {
+ {0xe46b26a0, 0xdddd, 0x11db, 0x8afd, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ {0x7a8044a0, 0x1a71, 0x11e3, 0xa184, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}},
+ EFFECT_CONTROL_API_VERSION,
+ (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_HW_ACC_TUNNEL ),
+ 0, /* TODO */
+ 1,
+ "QCOM MSM offload visualizer",
+ "The Android Open Source Project",
+};
+
+const effect_descriptor_t *descriptors[] = {
+ &visualizer_descriptor,
+ NULL,
+};
+
+
+pthread_once_t once = PTHREAD_ONCE_INIT;
+int init_status;
+
+/* list of created effects. Updated by visualizer_hal_start_output()
+ * and visualizer_hal_stop_output() */
+struct listnode created_effects_list;
+/* list of active output streams. Updated by visualizer_hal_start_output()
+ * and visualizer_hal_stop_output() */
+struct listnode active_outputs_list;
+
+/* thread capturing PCM from Proxy port and calling the process function on each enabled effect
+ * attached to an active output stream */
+pthread_t capture_thread;
+/* lock must be held when modifying or accessing created_effects_list or active_outputs_list */
+pthread_mutex_t lock;
+/* thread_lock must be held when starting or stopping the capture thread.
+ * Locking order: thread_lock -> lock */
+pthread_mutex_t thread_lock;
+/* cond is signaled when an output is started or stopped or an effect is enabled or disabled: the
+ * capture thread will reevaluate the capture and effect process conditions. */
+pthread_cond_t cond;
+/* true when requesting the capture thread to exit */
+bool exit_thread;
+/* 0 if the capture thread was created successfully */
+int thread_status;
+
+
+#define DSP_OUTPUT_LATENCY_MS 0 /* Fudge factor for latency after capture point in audio DSP */
+
+/* Retry count and delay for mixer open */
+#define RETRY_NUMBER 10
+#define RETRY_US 500000
+
+#define MIXER_CARD 0
+#define SOUND_CARD 0
+#define CAPTURE_DEVICE 8
+
+/* Proxy port supports only MMAP read and these fixed parameters */
+#define AUDIO_CAPTURE_CHANNEL_COUNT 2
+#define AUDIO_CAPTURE_SMP_RATE 48000
+#define AUDIO_CAPTURE_PERIOD_SIZE (768)
+#define AUDIO_CAPTURE_PERIOD_COUNT 32
+
+struct pcm_config pcm_config_capture = {
+ .channels = AUDIO_CAPTURE_CHANNEL_COUNT,
+ .rate = AUDIO_CAPTURE_SMP_RATE,
+ .period_size = AUDIO_CAPTURE_PERIOD_SIZE,
+ .period_count = AUDIO_CAPTURE_PERIOD_COUNT,
+ .format = PCM_FORMAT_S16_LE,
+ .start_threshold = AUDIO_CAPTURE_PERIOD_SIZE / 4,
+ .stop_threshold = INT_MAX,
+ .avail_min = AUDIO_CAPTURE_PERIOD_SIZE / 4,
+};
+
+
+/*
+ * Local functions
+ */
+
+static void init_once() {
+ list_init(&created_effects_list);
+ list_init(&active_outputs_list);
+
+ pthread_mutex_init(&lock, NULL);
+ pthread_mutex_init(&thread_lock, NULL);
+ pthread_cond_init(&cond, NULL);
+ exit_thread = false;
+ thread_status = -1;
+
+ init_status = 0;
+}
+
+int lib_init() {
+ pthread_once(&once, init_once);
+ return init_status;
+}
+
+bool effect_exists(effect_context_t *context) {
+ struct listnode *node;
+
+ list_for_each(node, &created_effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(node,
+ effect_context_t,
+ effects_list_node);
+ if (fx_ctxt == context) {
+ return true;
+ }
+ }
+ return false;
+}
+
+output_context_t *get_output(audio_io_handle_t output) {
+ struct listnode *node;
+
+ list_for_each(node, &active_outputs_list) {
+ output_context_t *out_ctxt = node_to_item(node,
+ output_context_t,
+ outputs_list_node);
+ if (out_ctxt->handle == output) {
+ return out_ctxt;
+ }
+ }
+ return NULL;
+}
+
+void add_effect_to_output(output_context_t * output, effect_context_t *context) {
+ struct listnode *fx_node;
+
+ list_for_each(fx_node, &output->effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(fx_node,
+ effect_context_t,
+ output_node);
+ if (fx_ctxt == context)
+ return;
+ }
+ list_add_tail(&output->effects_list, &context->output_node);
+}
+
+void remove_effect_from_output(output_context_t * output, effect_context_t *context) {
+ struct listnode *fx_node;
+
+ list_for_each(fx_node, &output->effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(fx_node,
+ effect_context_t,
+ output_node);
+ if (fx_ctxt == context) {
+ list_remove(&context->output_node);
+ return;
+ }
+ }
+}
+
+bool effects_enabled() {
+ struct listnode *out_node;
+
+ list_for_each(out_node, &active_outputs_list) {
+ struct listnode *fx_node;
+ output_context_t *out_ctxt = node_to_item(out_node,
+ output_context_t,
+ outputs_list_node);
+
+ list_for_each(fx_node, &out_ctxt->effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(fx_node,
+ effect_context_t,
+ output_node);
+ if (fx_ctxt->state == EFFECT_STATE_ACTIVE)
+ return true;
+ }
+ }
+ return false;
+}
+
+int configure_proxy_capture(struct mixer *mixer, int value) {
+ const char *proxy_ctl_name = "AFE_PCM_RX Audio Mixer MultiMedia4";
+ struct mixer_ctl *ctl;
+
+ ctl = mixer_get_ctl_by_name(mixer, proxy_ctl_name);
+ if (ctl == NULL) {
+ ALOGW("%s: could not get %s ctl", __func__, proxy_ctl_name);
+ return -EINVAL;
+ }
+ if (mixer_ctl_set_value(ctl, 0, value) != 0)
+ ALOGW("%s: error setting value %d on %s ", __func__, value, proxy_ctl_name);
+
+ return 0;
+}
+
+
+void *capture_thread_loop(void *arg)
+{
+ int16_t data[AUDIO_CAPTURE_PERIOD_SIZE * AUDIO_CAPTURE_CHANNEL_COUNT * sizeof(int16_t)];
+ audio_buffer_t buf;
+ buf.frameCount = AUDIO_CAPTURE_PERIOD_SIZE;
+ buf.s16 = data;
+ bool capture_enabled = false;
+ struct mixer *mixer;
+ struct pcm *pcm = NULL;
+ int ret;
+ int retry_num = 0;
+
+ ALOGD("thread enter");
+
+ prctl(PR_SET_NAME, (unsigned long)"visualizer capture", 0, 0, 0);
+
+ pthread_mutex_lock(&lock);
+
+ mixer = mixer_open(MIXER_CARD);
+ while (mixer == NULL && retry_num < RETRY_NUMBER) {
+ usleep(RETRY_US);
+ mixer = mixer_open(MIXER_CARD);
+ retry_num++;
+ }
+ if (mixer == NULL) {
+ pthread_mutex_unlock(&lock);
+ return NULL;
+ }
+
+ for (;;) {
+ if (exit_thread) {
+ break;
+ }
+ if (effects_enabled()) {
+ if (!capture_enabled) {
+ ret = configure_proxy_capture(mixer, 1);
+ if (ret == 0) {
+ pcm = pcm_open(SOUND_CARD, CAPTURE_DEVICE,
+ PCM_IN|PCM_MMAP|PCM_NOIRQ, &pcm_config_capture);
+ if (pcm && !pcm_is_ready(pcm)) {
+ ALOGW("%s: %s", __func__, pcm_get_error(pcm));
+ pcm_close(pcm);
+ pcm = NULL;
+ configure_proxy_capture(mixer, 0);
+ } else {
+ capture_enabled = true;
+ ALOGD("%s: capture ENABLED", __func__);
+ }
+ }
+ }
+ } else {
+ if (capture_enabled) {
+ if (pcm != NULL)
+ pcm_close(pcm);
+ configure_proxy_capture(mixer, 0);
+ ALOGD("%s: capture DISABLED", __func__);
+ capture_enabled = false;
+ }
+ pthread_cond_wait(&cond, &lock);
+ }
+ if (!capture_enabled)
+ continue;
+
+ pthread_mutex_unlock(&lock);
+ ret = pcm_mmap_read(pcm, data, sizeof(data));
+ pthread_mutex_lock(&lock);
+
+ if (ret == 0) {
+ struct listnode *out_node;
+
+ list_for_each(out_node, &active_outputs_list) {
+ output_context_t *out_ctxt = node_to_item(out_node,
+ output_context_t,
+ outputs_list_node);
+ struct listnode *fx_node;
+
+ list_for_each(fx_node, &out_ctxt->effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(fx_node,
+ effect_context_t,
+ output_node);
+ fx_ctxt->ops.process(fx_ctxt, &buf, &buf);
+ }
+ }
+ } else {
+ ALOGW("%s: read status %d %s", __func__, ret, pcm_get_error(pcm));
+ }
+ }
+
+ if (capture_enabled) {
+ if (pcm != NULL)
+ pcm_close(pcm);
+ configure_proxy_capture(mixer, 0);
+ }
+ mixer_close(mixer);
+ pthread_mutex_unlock(&lock);
+
+ ALOGD("thread exit");
+
+ return NULL;
+}
+
+/*
+ * Interface from audio HAL
+ */
+
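+/*
+ * Called by the audio HAL when an output stream is started: registers the
+ * output, attaches any effects already created for it, and spawns the capture
+ * thread when this is the first active output.
+ */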
+__attribute__ ((visibility ("default")))
+int visualizer_hal_start_output(audio_io_handle_t output) {
+ int ret = 0;
+ struct listnode *node;
+
+ ALOGV("%s", __func__);
+
+ if (lib_init() != 0)
+ return init_status;
+
+ pthread_mutex_lock(&thread_lock);
+ pthread_mutex_lock(&lock);
+ if (get_output(output) != NULL) {
+ ALOGW("%s output already started", __func__);
+ ret = -ENOSYS;
+ goto exit;
+ }
+
+ output_context_t *out_ctxt = (output_context_t *)malloc(sizeof(output_context_t));
+ if (out_ctxt == NULL) {
+ ret = -ENOMEM;
+ goto exit;
+ }
+ out_ctxt->handle = output;
+ list_init(&out_ctxt->effects_list);
+
+ list_for_each(node, &created_effects_list) {
+ effect_context_t *fx_ctxt = node_to_item(node,
+ effect_context_t,
+ effects_list_node);
+ if (fx_ctxt->out_handle == output) {
+ list_add_tail(&out_ctxt->effects_list, &fx_ctxt->output_node);
+ }
+ }
+ if (list_empty(&active_outputs_list)) {
+ exit_thread = false;
+ thread_status = pthread_create(&capture_thread, (const pthread_attr_t *) NULL,
+ capture_thread_loop, NULL);
+ }
+ list_add_tail(&active_outputs_list, &out_ctxt->outputs_list_node);
+ pthread_cond_signal(&cond);
+
+exit:
+ pthread_mutex_unlock(&lock);
+ pthread_mutex_unlock(&thread_lock);
+ return ret;
+}
+
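+/*
+ * Called by the audio HAL when an output previously passed to
+ * visualizer_hal_start_output is closed: removes it from the active list and
+ * stops and joins the capture thread when no active output remains.
+ */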
+__attribute__ ((visibility ("default")))
+int visualizer_hal_stop_output(audio_io_handle_t output) {
+ int ret = 0;
+ struct listnode *node;
+ output_context_t *out_ctxt;
+
+ ALOGV("%s", __func__);
+
+ if (lib_init() != 0)
+ return init_status;
+
+ pthread_mutex_lock(&thread_lock);
+ pthread_mutex_lock(&lock);
+
+ out_ctxt = get_output(output);
+ if (out_ctxt == NULL) {
+ ALOGW("%s output not started", __func__);
+ ret = -ENOSYS;
+ goto exit;
+ }
+
+ list_remove(&out_ctxt->outputs_list_node);
+ pthread_cond_signal(&cond);
+
+ if (list_empty(&active_outputs_list)) {
+ if (thread_status == 0) {
+ exit_thread = true;
+ pthread_cond_signal(&cond);
+ pthread_mutex_unlock(&lock);
+ pthread_join(capture_thread, (void **) NULL);
+ pthread_mutex_lock(&lock);
+ thread_status = -1;
+ }
+ }
+
+ free(out_ctxt);
+
+exit:
+ pthread_mutex_unlock(&lock);
+ pthread_mutex_unlock(&thread_lock);
+ return ret;
+}
+
+/*
+ * Effect operations
+ */
+
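+/*
+ * Validates and stores a new effect configuration. Only stereo 16 bit PCM
+ * with identical input and output sampling rate, channels and format is
+ * accepted; the effect is then reset.
+ */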
+int set_config(effect_context_t *context, effect_config_t *config)
+{
+ if (config->inputCfg.samplingRate != config->outputCfg.samplingRate) return -EINVAL;
+ if (config->inputCfg.channels != config->outputCfg.channels) return -EINVAL;
+ if (config->inputCfg.format != config->outputCfg.format) return -EINVAL;
+ if (config->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL;
+ if (config->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE &&
+ config->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL;
+ if (config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) return -EINVAL;
+
+ context->config = *config;
+
+ if (context->ops.reset)
+ context->ops.reset(context);
+
+ return 0;
+}
+
+void get_config(effect_context_t *context, effect_config_t *config)
+{
+ *config = context->config;
+}
+
+
+/*
+ * Visualizer operations
+ */
+
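+/*
+ * Returns the time in milliseconds elapsed since the capture buffer was last
+ * updated, or 0 if no update time is recorded.
+ */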
+uint32_t visualizer_get_delta_time_ms_from_updated_time(visualizer_context_t* visu_ctxt) {
+ uint32_t delta_ms = 0;
+ if (visu_ctxt->buffer_update_time.tv_sec != 0) {
+ struct timespec ts;
+ if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
+ time_t secs = ts.tv_sec - visu_ctxt->buffer_update_time.tv_sec;
+ long nsec = ts.tv_nsec - visu_ctxt->buffer_update_time.tv_nsec;
+ if (nsec < 0) {
+ --secs;
+ nsec += 1000000000;
+ }
+ delta_ms = secs * 1000 + nsec / 1000000;
+ }
+ }
+ return delta_ms;
+}
+
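+/* Clears the capture indexes and buffer and restores the default DSP latency. */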
+int visualizer_reset(effect_context_t *context)
+{
+ visualizer_context_t * visu_ctxt = (visualizer_context_t *)context;
+
+ visu_ctxt->capture_idx = 0;
+ visu_ctxt->last_capture_idx = 0;
+ visu_ctxt->buffer_update_time.tv_sec = 0;
+ visu_ctxt->latency = DSP_OUTPUT_LATENCY_MS;
+ memset(visu_ctxt->capture_buf, 0x80, CAPTURE_BUF_SIZE);
+ return 0;
+}
+
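+/*
+ * Sets the default configuration (stereo, 16 bit PCM, 44.1 kHz), capture size,
+ * scaling mode and measurement state, then applies it through set_config().
+ */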
+int visualizer_init(effect_context_t *context)
+{
+ int32_t i;
+
+ visualizer_context_t * visu_ctxt = (visualizer_context_t *)context;
+
+ context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ;
+ context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ context->config.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+ context->config.inputCfg.samplingRate = 44100;
+ context->config.inputCfg.bufferProvider.getBuffer = NULL;
+ context->config.inputCfg.bufferProvider.releaseBuffer = NULL;
+ context->config.inputCfg.bufferProvider.cookie = NULL;
+ context->config.inputCfg.mask = EFFECT_CONFIG_ALL;
+ context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE;
+ context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO;
+ context->config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT;
+ context->config.outputCfg.samplingRate = 44100;
+ context->config.outputCfg.bufferProvider.getBuffer = NULL;
+ context->config.outputCfg.bufferProvider.releaseBuffer = NULL;
+ context->config.outputCfg.bufferProvider.cookie = NULL;
+ context->config.outputCfg.mask = EFFECT_CONFIG_ALL;
+
+ visu_ctxt->capture_size = VISUALIZER_CAPTURE_SIZE_MAX;
+ visu_ctxt->scaling_mode = VISUALIZER_SCALING_MODE_NORMALIZED;
+
+ // measurement initialization
+ visu_ctxt->channel_count = popcount(context->config.inputCfg.channels);
+ visu_ctxt->meas_mode = MEASUREMENT_MODE_NONE;
+ visu_ctxt->meas_wndw_size_in_buffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS;
+ visu_ctxt->meas_buffer_idx = 0;
+ for (i=0 ; i<visu_ctxt->meas_wndw_size_in_buffers ; i++) {
+ visu_ctxt->past_meas[i].is_valid = false;
+ visu_ctxt->past_meas[i].peak_u16 = 0;
+ visu_ctxt->past_meas[i].rms_squared = 0;
+ }
+
+ set_config(context, &context->config);
+
+ return 0;
+}
+
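+/* Reads the capture size, scaling mode or measurement mode parameter. */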
+int visualizer_get_parameter(effect_context_t *context, effect_param_t *p, uint32_t *size)
+{
+ visualizer_context_t *visu_ctxt = (visualizer_context_t *)context;
+
+ p->status = 0;
+ *size = sizeof(effect_param_t) + sizeof(uint32_t);
+ if (p->psize != sizeof(uint32_t)) {
+ p->status = -EINVAL;
+ return 0;
+ }
+ switch (*(uint32_t *)p->data) {
+ case VISUALIZER_PARAM_CAPTURE_SIZE:
+ ALOGV("%s get capture_size = %d", __func__, visu_ctxt->capture_size);
+ *((uint32_t *)p->data + 1) = visu_ctxt->capture_size;
+ p->vsize = sizeof(uint32_t);
+ *size += sizeof(uint32_t);
+ break;
+ case VISUALIZER_PARAM_SCALING_MODE:
+ ALOGV("%s get scaling_mode = %d", __func__, visu_ctxt->scaling_mode);
+ *((uint32_t *)p->data + 1) = visu_ctxt->scaling_mode;
+ p->vsize = sizeof(uint32_t);
+ *size += sizeof(uint32_t);
+ break;
+ case VISUALIZER_PARAM_MEASUREMENT_MODE:
+ ALOGV("%s get meas_mode = %d", __func__, visu_ctxt->meas_mode);
+ *((uint32_t *)p->data + 1) = visu_ctxt->meas_mode;
+ p->vsize = sizeof(uint32_t);
+ *size += sizeof(uint32_t);
+ break;
+ default:
+ p->status = -EINVAL;
+ }
+ return 0;
+}
+
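+/*
+ * Writes the capture size, scaling mode or measurement mode parameter.
+ * The latency parameter is ignored since capture happens at the DSP output.
+ */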
+int visualizer_set_parameter(effect_context_t *context, effect_param_t *p, uint32_t size)
+{
+ visualizer_context_t *visu_ctxt = (visualizer_context_t *)context;
+
+ if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t))
+ return -EINVAL;
+
+ switch (*(uint32_t *)p->data) {
+ case VISUALIZER_PARAM_CAPTURE_SIZE:
+ visu_ctxt->capture_size = *((uint32_t *)p->data + 1);
+ ALOGV("%s set capture_size = %d", __func__, visu_ctxt->capture_size);
+ break;
+ case VISUALIZER_PARAM_SCALING_MODE:
+ visu_ctxt->scaling_mode = *((uint32_t *)p->data + 1);
+ ALOGV("%s set scaling_mode = %d", __func__, visu_ctxt->scaling_mode);
+ break;
+ case VISUALIZER_PARAM_LATENCY:
+ /* Ignore latency as we capture at DSP output
+ * visu_ctxt->latency = *((uint32_t *)p->data + 1); */
+ ALOGV("%s set latency = %d", __func__, visu_ctxt->latency);
+ break;
+ case VISUALIZER_PARAM_MEASUREMENT_MODE:
+ visu_ctxt->meas_mode = *((uint32_t *)p->data + 1);
+ ALOGV("%s set meas_mode = %d", __func__, visu_ctxt->meas_mode);
+ break;
+ default:
+ return -EINVAL;
+ }
+ return 0;
+}
+
+/* Real process function called from capture thread. Called with lock held */
+int visualizer_process(effect_context_t *context,
+ audio_buffer_t *inBuffer,
+ audio_buffer_t *outBuffer)
+{
+ visualizer_context_t *visu_ctxt = (visualizer_context_t *)context;
+
+ if (!effect_exists(context))
+ return -EINVAL;
+
+ if (inBuffer == NULL || inBuffer->raw == NULL ||
+ outBuffer == NULL || outBuffer->raw == NULL ||
+ inBuffer->frameCount != outBuffer->frameCount ||
+ inBuffer->frameCount == 0) {
+ return -EINVAL;
+ }
+
+ // perform measurements if needed
+ if (visu_ctxt->meas_mode & MEASUREMENT_MODE_PEAK_RMS) {
+ // find the peak and RMS squared for the new buffer
+ uint32_t inIdx;
+ int16_t max_sample = 0;
+ float rms_squared_acc = 0;
+ for (inIdx = 0 ; inIdx < inBuffer->frameCount * visu_ctxt->channel_count ; inIdx++) {
+ if (inBuffer->s16[inIdx] > max_sample) {
+ max_sample = inBuffer->s16[inIdx];
+ } else if (-inBuffer->s16[inIdx] > max_sample) {
+ max_sample = -inBuffer->s16[inIdx];
+ }
+ rms_squared_acc += (inBuffer->s16[inIdx] * inBuffer->s16[inIdx]);
+ }
+ // store the measurement
+ visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].peak_u16 = (uint16_t)max_sample;
+ visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].rms_squared =
+ rms_squared_acc / (inBuffer->frameCount * visu_ctxt->channel_count);
+ visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].is_valid = true;
+ if (++visu_ctxt->meas_buffer_idx >= visu_ctxt->meas_wndw_size_in_buffers) {
+ visu_ctxt->meas_buffer_idx = 0;
+ }
+ }
+
+ /* all code below assumes stereo 16 bit PCM output and input */
+ int32_t shift;
+
+ if (visu_ctxt->scaling_mode == VISUALIZER_SCALING_MODE_NORMALIZED) {
+ /* derive capture scaling factor from peak value in current buffer
+ * this gives more interesting captures for display. */
+ shift = 32;
+ int len = inBuffer->frameCount * 2;
+ int i;
+ for (i = 0; i < len; i++) {
+ int32_t smp = inBuffer->s16[i];
+ if (smp < 0) smp = -smp - 1; /* take care to keep the max negative in range */
+ if (smp == 0) continue; /* __builtin_clz(0) is undefined */
+ int32_t clz = __builtin_clz(smp);
+ if (shift > clz) shift = clz;
+ }
+ /* A maximum amplitude signal will have 17 leading zeros, which we want to
+ * translate to a shift of 8 (for converting 16 bit to 8 bit) */
+ shift = 25 - shift;
+ /* Never scale by less than 8 to avoid returning unaltered PCM signal. */
+ if (shift < 3) {
+ shift = 3;
+ }
+ /* add one to combine the division by 2 needed after summing
+ * left and right channels below */
+ shift++;
+ } else {
+ assert(visu_ctxt->scaling_mode == VISUALIZER_SCALING_MODE_AS_PLAYED);
+ shift = 9;
+ }
+
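+ /* downmix to mono, scale to unsigned 8 bit and store into the circular
+ * capture buffer */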
+ uint32_t capt_idx;
+ uint32_t in_idx;
+ uint8_t *buf = visu_ctxt->capture_buf;
+ for (in_idx = 0, capt_idx = visu_ctxt->capture_idx;
+ in_idx < inBuffer->frameCount;
+ in_idx++, capt_idx++) {
+ if (capt_idx >= CAPTURE_BUF_SIZE) {
+ /* wrap around */
+ capt_idx = 0;
+ }
+ int32_t smp = inBuffer->s16[2 * in_idx] + inBuffer->s16[2 * in_idx + 1];
+ smp = smp >> shift;
+ buf[capt_idx] = ((uint8_t)smp)^0x80;
+ }
+
+ /* XXX the following two should really be atomic, though it probably doesn't
+ * matter much for visualization purposes */
+ visu_ctxt->capture_idx = capt_idx;
+ /* update last buffer update time stamp */
+ if (clock_gettime(CLOCK_MONOTONIC, &visu_ctxt->buffer_update_time) < 0) {
+ visu_ctxt->buffer_update_time.tv_sec = 0;
+ }
+
+ if (context->state != EFFECT_STATE_ACTIVE) {
+ ALOGV("%s DONE inactive", __func__);
+ return -ENODATA;
+ }
+
+ return 0;
+}
+
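+/*
+ * Handles the visualizer specific commands: VISUALIZER_CMD_CAPTURE returns the
+ * latest latency-compensated waveform from the capture buffer and
+ * VISUALIZER_CMD_MEASURE returns the peak and RMS levels in mB.
+ */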
+int visualizer_command(effect_context_t * context, uint32_t cmdCode, uint32_t cmdSize,
+ void *pCmdData, uint32_t *replySize, void *pReplyData)
+{
+ visualizer_context_t * visu_ctxt = (visualizer_context_t *)context;
+
+ switch (cmdCode) {
+ case VISUALIZER_CMD_CAPTURE:
+ if (pReplyData == NULL || replySize == NULL ||
+ *replySize != visu_ctxt->capture_size) {
+ ALOGV("%s VISUALIZER_CMD_CAPTURE error *replySize %d context->capture_size %d",
+ __func__, replySize == NULL ? 0 : *replySize, visu_ctxt->capture_size);
+ return -EINVAL;
+ }
+
+ if (!context->offload_enabled)
+ break;
+
+ if (context->state == EFFECT_STATE_ACTIVE) {
+ int32_t latency_ms = visu_ctxt->latency;
+ const uint32_t delta_ms = visualizer_get_delta_time_ms_from_updated_time(visu_ctxt);
+ latency_ms -= delta_ms;
+ if (latency_ms < 0) {
+ latency_ms = 0;
+ }
+ const uint32_t delta_smp = context->config.inputCfg.samplingRate * latency_ms / 1000;
+
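+ /* read back latency_ms worth of samples behind the current capture
+ * index to compensate for the DSP output latency */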
+ int32_t capture_point = visu_ctxt->capture_idx - visu_ctxt->capture_size - delta_smp;
+ int32_t capture_size = visu_ctxt->capture_size;
+ if (capture_point < 0) {
+ int32_t size = -capture_point;
+ if (size > capture_size)
+ size = capture_size;
+
+ memcpy(pReplyData,
+ visu_ctxt->capture_buf + CAPTURE_BUF_SIZE + capture_point,
+ size);
+ pReplyData = (void *)((size_t)pReplyData + size);
+ capture_size -= size;
+ capture_point = 0;
+ }
+ memcpy(pReplyData,
+ visu_ctxt->capture_buf + capture_point,
+ capture_size);
+
+ /* if audio framework has stopped playing audio although the effect is still
+ * active we must clear the capture buffer to return silence */
+ if ((visu_ctxt->last_capture_idx == visu_ctxt->capture_idx) &&
+ (visu_ctxt->buffer_update_time.tv_sec != 0)) {
+ if (delta_ms > MAX_STALL_TIME_MS) {
+ ALOGV("%s capture going to idle", __func__);
+ visu_ctxt->buffer_update_time.tv_sec = 0;
+ memset(pReplyData, 0x80, visu_ctxt->capture_size);
+ }
+ }
+ visu_ctxt->last_capture_idx = visu_ctxt->capture_idx;
+ } else {
+ memset(pReplyData, 0x80, visu_ctxt->capture_size);
+ }
+ break;
+
+ case VISUALIZER_CMD_MEASURE: {
+ if (pReplyData == NULL || replySize == NULL ||
+ *replySize < 2 * sizeof(int32_t)) {
+ ALOGV("%s VISUALIZER_CMD_MEASURE error *replySize %d", __func__,
+ replySize == NULL ? 0 : *replySize);
+ return -EINVAL;
+ }
+ uint16_t peak_u16 = 0;
+ float sum_rms_squared = 0.0f;
+ uint8_t nb_valid_meas = 0;
+ /* reset measurements if last measurement was too long ago (which implies stored
+ * measurements aren't relevant anymore and shouldn't bias the new one) */
+ const int32_t delay_ms = visualizer_get_delta_time_ms_from_updated_time(visu_ctxt);
+ if (delay_ms > DISCARD_MEASUREMENTS_TIME_MS) {
+ uint32_t i;
+ ALOGV("Discarding measurements, last measurement is %dms old", delay_ms);
+ for (i=0 ; i<visu_ctxt->meas_wndw_size_in_buffers ; i++) {
+ visu_ctxt->past_meas[i].is_valid = false;
+ visu_ctxt->past_meas[i].peak_u16 = 0;
+ visu_ctxt->past_meas[i].rms_squared = 0;
+ }
+ visu_ctxt->meas_buffer_idx = 0;
+ } else {
+ /* only use actual measurements, otherwise the first RMS measure happening before
+ * MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS have been played will always be artificially
+ * low */
+ uint32_t i;
+ for (i=0 ; i < visu_ctxt->meas_wndw_size_in_buffers ; i++) {
+ if (visu_ctxt->past_meas[i].is_valid) {
+ if (visu_ctxt->past_meas[i].peak_u16 > peak_u16) {
+ peak_u16 = visu_ctxt->past_meas[i].peak_u16;
+ }
+ sum_rms_squared += visu_ctxt->past_meas[i].rms_squared;
+ nb_valid_meas++;
+ }
+ }
+ }
+ float rms = nb_valid_meas == 0 ? 0.0f : sqrtf(sum_rms_squared / nb_valid_meas);
+ int32_t* p_int_reply_data = (int32_t*)pReplyData;
+ /* convert from I16 sample values to mB and write results */
+ if (rms < 0.000016f) {
+ p_int_reply_data[MEASUREMENT_IDX_RMS] = -9600; //-96dB
+ } else {
+ p_int_reply_data[MEASUREMENT_IDX_RMS] = (int32_t) (2000 * log10(rms / 32767.0f));
+ }
+ if (peak_u16 == 0) {
+ p_int_reply_data[MEASUREMENT_IDX_PEAK] = -9600; //-96dB
+ } else {
+ p_int_reply_data[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peak_u16 / 32767.0f));
+ }
+ ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)",
+ peak_u16, p_int_reply_data[MEASUREMENT_IDX_PEAK],
+ rms, p_int_reply_data[MEASUREMENT_IDX_RMS]);
+ }
+ break;
+
+ default:
+ ALOGW("%s invalid command %d", __func__, cmdCode);
+ return -EINVAL;
+ }
+ return 0;
+}
+
+/*
+ * Effect Library Interface Implementation
+ */
+
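+/*
+ * Creates an effect instance for the given UUID, initializes it and adds it to
+ * the list of created effects and, if its output is already active, to that
+ * output's effect list. Only the visualizer UUID is supported.
+ */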
+int effect_lib_create(const effect_uuid_t *uuid,
+ int32_t sessionId,
+ int32_t ioId,
+ effect_handle_t *pHandle) {
+ int ret;
+ int i;
+
+ if (lib_init() != 0)
+ return init_status;
+
+ if (pHandle == NULL || uuid == NULL)
+ return -EINVAL;
+
+ for (i = 0; descriptors[i] != NULL; i++) {
+ if (memcmp(uuid, &descriptors[i]->uuid, sizeof(effect_uuid_t)) == 0)
+ break;
+ }
+
+ if (descriptors[i] == NULL)
+ return -EINVAL;
+
+ effect_context_t *context;
+ if (memcmp(uuid, &visualizer_descriptor.uuid, sizeof(effect_uuid_t)) == 0) {
+ visualizer_context_t *visu_ctxt = (visualizer_context_t *)calloc(1,
+ sizeof(visualizer_context_t));
+ if (visu_ctxt == NULL)
+ return -ENOMEM;
+ context = (effect_context_t *)visu_ctxt;
+ context->ops.init = visualizer_init;
+ context->ops.reset = visualizer_reset;
+ context->ops.process = visualizer_process;
+ context->ops.set_parameter = visualizer_set_parameter;
+ context->ops.get_parameter = visualizer_get_parameter;
+ context->ops.command = visualizer_command;
+ } else {
+ return -EINVAL;
+ }
+
+ context->itfe = &effect_interface;
+ context->state = EFFECT_STATE_UNINITIALIZED;
+ context->out_handle = (audio_io_handle_t)ioId;
+ context->desc = &visualizer_descriptor;
+
+ ret = context->ops.init(context);
+ if (ret < 0) {
+ ALOGW("%s init failed", __func__);
+ free(context);
+ return ret;
+ }
+
+ context->state = EFFECT_STATE_INITIALIZED;
+
+ pthread_mutex_lock(&lock);
+ list_add_tail(&created_effects_list, &context->effects_list_node);
+ output_context_t *out_ctxt = get_output(ioId);
+ if (out_ctxt != NULL)
+ add_effect_to_output(out_ctxt, context);
+ pthread_mutex_unlock(&lock);
+
+ *pHandle = (effect_handle_t)context;
+
+ ALOGV("%s created context %p", __func__, context);
+
+ return 0;
+}
+
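+/*
+ * Releases an effect instance: detaches it from its output, removes it from
+ * the list of created effects and frees its context.
+ */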
+int effect_lib_release(effect_handle_t handle) {
+ effect_context_t *context = (effect_context_t *)handle;
+ int status;
+
+ if (lib_init() != 0)
+ return init_status;
+
+ ALOGV("%s context %p", __func__, handle);
+ pthread_mutex_lock(&lock);
+ status = -EINVAL;
+ if (effect_exists(context)) {
+ output_context_t *out_ctxt = get_output(context->out_handle);
+ if (out_ctxt != NULL)
+ remove_effect_from_output(out_ctxt, context);
+ list_remove(&context->effects_list_node);
+ if (context->ops.release)
+ context->ops.release(context);
+ free(context);
+ status = 0;
+ }
+ pthread_mutex_unlock(&lock);
+
+ return status;
+}
+
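+/* Copies the descriptor matching the given UUID, if any. */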
+int effect_lib_get_descriptor(const effect_uuid_t *uuid,
+ effect_descriptor_t *descriptor) {
+ int i;
+
+ if (lib_init() != 0)
+ return init_status;
+
+ if (descriptor == NULL || uuid == NULL) {
+ ALOGV("%s called with NULL pointer", __func__);
+ return -EINVAL;
+ }
+
+ for (i = 0; descriptors[i] != NULL; i++) {
+ if (memcmp(uuid, &descriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
+ *descriptor = *descriptors[i];
+ return 0;
+ }
+ }
+
+ return -EINVAL;
+}
+
+/*
+ * Effect Control Interface Implementation
+ */
+
+/* Stub function for effect interface: never called for offloaded effects */
+int effect_process(effect_handle_t self,
+ audio_buffer_t *inBuffer,
+ audio_buffer_t *outBuffer)
+{
+ effect_context_t * context = (effect_context_t *)self;
+ int status = 0;
+
+ ALOGW("%s Called ?????", __func__);
+
+ pthread_mutex_lock(&lock);
+ if (!effect_exists(context)) {
+ status = -EINVAL;
+ goto exit;
+ }
+
+ if (context->state != EFFECT_STATE_ACTIVE) {
+ status = -EINVAL;
+ goto exit;
+ }
+
+exit:
+ pthread_mutex_unlock(&lock);
+ return status;
+}
+
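+/*
+ * Dispatches the generic effect commands (init, config, enable/disable,
+ * parameters, EFFECT_CMD_OFFLOAD) and forwards proprietary commands such as
+ * VISUALIZER_CMD_CAPTURE to the effect specific command handler.
+ */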
+int effect_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
+ void *pCmdData, uint32_t *replySize, void *pReplyData)
+{
+
+ effect_context_t * context = (effect_context_t *)self;
+ int status = 0;
+
+ pthread_mutex_lock(&lock);
+
+ if (!effect_exists(context) || context->state == EFFECT_STATE_UNINITIALIZED) {
+ status = -EINVAL;
+ goto exit;
+ }
+
+// ALOGV_IF(cmdCode != VISUALIZER_CMD_CAPTURE,
+// "%s command %d cmdSize %d", __func__, cmdCode, cmdSize);
+
+ switch (cmdCode) {
+ case EFFECT_CMD_INIT:
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ if (context->ops.init)
+ *(int *) pReplyData = context->ops.init(context);
+ else
+ *(int *) pReplyData = 0;
+ break;
+ case EFFECT_CMD_SET_CONFIG:
+ if (pCmdData == NULL || cmdSize != sizeof(effect_config_t)
+ || pReplyData == NULL || *replySize != sizeof(int)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ *(int *) pReplyData = set_config(context, (effect_config_t *) pCmdData);
+ break;
+ case EFFECT_CMD_GET_CONFIG:
+ if (pReplyData == NULL ||
+ *replySize != sizeof(effect_config_t)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ if (!context->offload_enabled) {
+ status = -EINVAL;
+ goto exit;
+ }
+
+ get_config(context, (effect_config_t *)pReplyData);
+ break;
+ case EFFECT_CMD_RESET:
+ if (context->ops.reset)
+ context->ops.reset(context);
+ break;
+ case EFFECT_CMD_ENABLE:
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ if (context->state != EFFECT_STATE_INITIALIZED) {
+ status = -ENOSYS;
+ goto exit;
+ }
+ context->state = EFFECT_STATE_ACTIVE;
+ if (context->ops.enable)
+ context->ops.enable(context);
+ pthread_cond_signal(&cond);
+ ALOGV("%s EFFECT_CMD_ENABLE", __func__);
+ *(int *)pReplyData = 0;
+ break;
+ case EFFECT_CMD_DISABLE:
+ if (pReplyData == NULL || *replySize != sizeof(int)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ if (context->state != EFFECT_STATE_ACTIVE) {
+ status = -ENOSYS;
+ goto exit;
+ }
+ context->state = EFFECT_STATE_INITIALIZED;
+ if (context->ops.disable)
+ context->ops.disable(context);
+ pthread_cond_signal(&cond);
+ ALOGV("%s EFFECT_CMD_DISABLE", __func__);
+ *(int *)pReplyData = 0;
+ break;
+ case EFFECT_CMD_GET_PARAM: {
+ if (pCmdData == NULL ||
+ cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) ||
+ pReplyData == NULL ||
+ *replySize < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t))) {
+ status = -EINVAL;
+ goto exit;
+ }
+ if (!context->offload_enabled) {
+ status = -EINVAL;
+ goto exit;
+ }
+ memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(uint32_t));
+ effect_param_t *p = (effect_param_t *)pReplyData;
+ if (context->ops.get_parameter)
+ context->ops.get_parameter(context, p, replySize);
+ } break;
+ case EFFECT_CMD_SET_PARAM: {
+ if (pCmdData == NULL ||
+ cmdSize != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) ||
+ pReplyData == NULL || *replySize != sizeof(int32_t)) {
+ status = -EINVAL;
+ goto exit;
+ }
+ *(int32_t *)pReplyData = 0;
+ effect_param_t *p = (effect_param_t *)pCmdData;
+ if (context->ops.set_parameter)
+ *(int32_t *)pReplyData = context->ops.set_parameter(context, p, *replySize);
+ } break;
+ case EFFECT_CMD_SET_DEVICE:
+ case EFFECT_CMD_SET_VOLUME:
+ case EFFECT_CMD_SET_AUDIO_MODE:
+ break;
+
+ case EFFECT_CMD_OFFLOAD: {
+ output_context_t *out_ctxt;
+
+ if (cmdSize != sizeof(effect_offload_param_t) || pCmdData == NULL
+ || pReplyData == NULL || *replySize != sizeof(int)) {
+ ALOGV("%s EFFECT_CMD_OFFLOAD bad format", __func__);
+ status = -EINVAL;
+ break;
+ }
+
+ effect_offload_param_t* offload_param = (effect_offload_param_t*)pCmdData;
+
+ ALOGV("%s EFFECT_CMD_OFFLOAD offload %d output %d",
+ __func__, offload_param->isOffload, offload_param->ioHandle);
+
+ *(int *)pReplyData = 0;
+
+ context->offload_enabled = offload_param->isOffload;
+ if (context->out_handle == offload_param->ioHandle)
+ break;
+
+ out_ctxt = get_output(context->out_handle);
+ if (out_ctxt != NULL)
+ remove_effect_from_output(out_ctxt, context);
+ out_ctxt = get_output(offload_param->ioHandle);
+ if (out_ctxt != NULL)
+ add_effect_to_output(out_ctxt, context);
+
+ context->out_handle = offload_param->ioHandle;
+
+ } break;
+
+ default:
+ if (cmdCode >= EFFECT_CMD_FIRST_PROPRIETARY && context->ops.command)
+ status = context->ops.command(context, cmdCode, cmdSize,
+ pCmdData, replySize, pReplyData);
+ else {
+ ALOGW("%s invalid command %d", __func__, cmdCode);
+ status = -EINVAL;
+ }
+ break;
+ }
+
+exit:
+ pthread_mutex_unlock(&lock);
+
+// ALOGV_IF(cmdCode != VISUALIZER_CMD_CAPTURE,"%s DONE", __func__);
+ return status;
+}
+
+/* Effect Control Interface Implementation: get_descriptor */
+int effect_get_descriptor(effect_handle_t self,
+ effect_descriptor_t *descriptor)
+{
+ effect_context_t *context = (effect_context_t *)self;
+
+ if (!effect_exists(context))
+ return -EINVAL;
+
+ if (descriptor == NULL)
+ return -EINVAL;
+
+ *descriptor = *context->desc;
+
+ return 0;
+}
+
+/* effect_handle_t interface implementation for visualizer effect */
+const struct effect_interface_s effect_interface = {
+ effect_process,
+ effect_command,
+ effect_get_descriptor,
+ NULL,
+};
+
+__attribute__ ((visibility ("default")))
+audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
+ tag : AUDIO_EFFECT_LIBRARY_TAG,
+ version : EFFECT_LIBRARY_API_VERSION,
+ name : "Visualizer Library",
+ implementor : "The Android Open Source Project",
+ create_effect : effect_lib_create,
+ release_effect : effect_lib_release,
+ get_descriptor : effect_lib_get_descriptor,
+};