@@ -67,12 +67,17 @@ hantro_h1_jpeg_enc_set_qtable(struct hantro_dev *vpu,
 					  unsigned char *chroma_qtable)
 {
 	u32 reg, i;
+	__be32 *luma_qtable_p;
+	__be32 *chroma_qtable_p;
+
+	luma_qtable_p = (__be32 *)luma_qtable;
+	chroma_qtable_p = (__be32 *)chroma_qtable;
 	for (i = 0; i < H1_JPEG_QUANT_TABLE_COUNT; i++) {
-		reg = get_unaligned_be32(&luma_qtable[i]);
+		reg = get_unaligned_be32(&luma_qtable_p[i]);
 		vepu_write_relaxed(vpu, reg, H1_REG_JPEG_LUMA_QUAT(i));
-		reg = get_unaligned_be32(&chroma_qtable[i]);
+		reg = get_unaligned_be32(&chroma_qtable_p[i]);
 		vepu_write_relaxed(vpu, reg, H1_REG_JPEG_CHROMA_QUAT(i));
 	}
 }
@@ -98,12 +98,17 @@ rk3399_vpu_jpeg_enc_set_qtable(struct hantro_dev *vpu,
 					  unsigned char *chroma_qtable)
 {
 	u32 reg, i;
+	__be32 *luma_qtable_p;
+	__be32 *chroma_qtable_p;
+
+	luma_qtable_p = (__be32 *)luma_qtable;
+	chroma_qtable_p = (__be32 *)chroma_qtable;
 	for (i = 0; i < VEPU_JPEG_QUANT_TABLE_COUNT; i++) {
-		reg = get_unaligned_be32(&luma_qtable[i]);
+		reg = get_unaligned_be32(&luma_qtable_p[i]);
 		vepu_write_relaxed(vpu, reg, VEPU_REG_JPEG_LUMA_QUAT(i));
-		reg = get_unaligned_be32(&chroma_qtable[i]);
+		reg = get_unaligned_be32(&chroma_qtable_p[i]);
 		vepu_write_relaxed(vpu, reg, VEPU_REG_JPEG_CHROMA_QUAT(i));
 	}
 }