@@ -88,7 +88,7 @@ static int cbc_aes_nx_crypt(struct skcipher_request *req,
memcpy(req->iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -391,7 +391,7 @@ static int ccm_nx_decrypt(struct aead_request *req,
/* update stats */
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -460,7 +460,7 @@ static int ccm_nx_encrypt(struct aead_request *req,
/* update stats */
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -102,7 +102,7 @@ static int ctr_aes_nx_crypt(struct skcipher_request *req, u8 *iv)
memcpy(iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -86,7 +86,7 @@ static int ecb_aes_nx_crypt(struct skcipher_request *req,
goto out;
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -382,7 +382,7 @@ static int gcm_aes_nx_crypt(struct aead_request *req, int enc,
NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;
atomic_inc(&(nx_ctx->stats->aes_ops));
- atomic64_add(csbcpb->csb.processed_byte_count,
+ atomic64_add(be32_to_cpu(csbcpb->csb.processed_byte_count),
&(nx_ctx->stats->aes_bytes));
processed += to_process;
@@ -16,6 +16,11 @@
#include "nx_csbcpb.h"
#include "nx.h"
+struct sha256_state_be {
+ __be32 state[SHA256_DIGEST_SIZE / 4];
+ u64 count;
+ u8 buf[SHA256_BLOCK_SIZE];
+};
static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
{
@@ -36,7 +41,7 @@ static int nx_crypto_ctx_sha256_init(struct crypto_tfm *tfm)
}
static int nx_sha256_init(struct shash_desc *desc) {
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct sha256_state_be *sctx = shash_desc_ctx(desc);
memset(sctx, 0, sizeof *sctx);
@@ -56,7 +61,7 @@ static int nx_sha256_init(struct shash_desc *desc) {
static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct sha256_state_be *sctx = shash_desc_ctx(desc);
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
struct nx_sg *out_sg;
@@ -175,7 +180,7 @@ static int nx_sha256_update(struct shash_desc *desc, const u8 *data,
static int nx_sha256_final(struct shash_desc *desc, u8 *out)
{
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct sha256_state_be *sctx = shash_desc_ctx(desc);
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
struct nx_sg *in_sg, *out_sg;
@@ -245,7 +250,7 @@ static int nx_sha256_final(struct shash_desc *desc, u8 *out)
static int nx_sha256_export(struct shash_desc *desc, void *out)
{
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct sha256_state_be *sctx = shash_desc_ctx(desc);
memcpy(out, sctx, sizeof(*sctx));
@@ -254,7 +259,7 @@ static int nx_sha256_export(struct shash_desc *desc, void *out)
static int nx_sha256_import(struct shash_desc *desc, const void *in)
{
- struct sha256_state *sctx = shash_desc_ctx(desc);
+ struct sha256_state_be *sctx = shash_desc_ctx(desc);
memcpy(sctx, in, sizeof(*sctx));
@@ -268,8 +273,8 @@ struct shash_alg nx_shash_sha256_alg = {
.final = nx_sha256_final,
.export = nx_sha256_export,
.import = nx_sha256_import,
- .descsize = sizeof(struct sha256_state),
- .statesize = sizeof(struct sha256_state),
+ .descsize = sizeof(struct sha256_state_be),
+ .statesize = sizeof(struct sha256_state_be),
.base = {
.cra_name = "sha256",
.cra_driver_name = "sha256-nx",
@@ -15,6 +15,11 @@
#include "nx_csbcpb.h"
#include "nx.h"
+struct sha512_state_be {
+ __be64 state[SHA512_DIGEST_SIZE / 8];
+ u64 count[2];
+ u8 buf[SHA512_BLOCK_SIZE];
+};
static int nx_crypto_ctx_sha512_init(struct crypto_tfm *tfm)
{
@@ -36,7 +41,7 @@ static int nx_crypto_ctx_sha512_init(struct crypto_tfm *tfm)
static int nx_sha512_init(struct shash_desc *desc)
{
- struct sha512_state *sctx = shash_desc_ctx(desc);
+ struct sha512_state_be *sctx = shash_desc_ctx(desc);
memset(sctx, 0, sizeof *sctx);
@@ -56,7 +61,7 @@ static int nx_sha512_init(struct shash_desc *desc)
static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- struct sha512_state *sctx = shash_desc_ctx(desc);
+ struct sha512_state_be *sctx = shash_desc_ctx(desc);
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
struct nx_sg *out_sg;
@@ -178,7 +183,7 @@ static int nx_sha512_update(struct shash_desc *desc, const u8 *data,
static int nx_sha512_final(struct shash_desc *desc, u8 *out)
{
- struct sha512_state *sctx = shash_desc_ctx(desc);
+ struct sha512_state_be *sctx = shash_desc_ctx(desc);
struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base);
struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;
struct nx_sg *in_sg, *out_sg;
@@ -251,7 +256,7 @@ static int nx_sha512_final(struct shash_desc *desc, u8 *out)
static int nx_sha512_export(struct shash_desc *desc, void *out)
{
- struct sha512_state *sctx = shash_desc_ctx(desc);
+ struct sha512_state_be *sctx = shash_desc_ctx(desc);
memcpy(out, sctx, sizeof(*sctx));
@@ -260,7 +265,7 @@ static int nx_sha512_export(struct shash_desc *desc, void *out)
static int nx_sha512_import(struct shash_desc *desc, const void *in)
{
- struct sha512_state *sctx = shash_desc_ctx(desc);
+ struct sha512_state_be *sctx = shash_desc_ctx(desc);
memcpy(sctx, in, sizeof(*sctx));
@@ -274,8 +279,8 @@ struct shash_alg nx_shash_sha512_alg = {
.final = nx_sha512_final,
.export = nx_sha512_export,
.import = nx_sha512_import,
- .descsize = sizeof(struct sha512_state),
- .statesize = sizeof(struct sha512_state),
+ .descsize = sizeof(struct sha512_state_be),
+ .statesize = sizeof(struct sha512_state_be),
.base = {
.cra_name = "sha512",
.cra_driver_name = "sha512-nx",
@@ -140,8 +140,8 @@ struct cop_status_block {
u8 crb_seq_number;
u8 completion_code;
u8 completion_extension;
- u32 processed_byte_count;
- u64 address;
+ __be32 processed_byte_count;
+ __be64 address;
} __packed;
/* Nest accelerator workbook section 4.4 */
The nx driver started out its life as a BE-only driver. However, somewhere along the way LE support was partially added. This never seems to have been extended all the way, but it does trigger numerous warnings during the build. This patch fixes all those warnings, but it doesn't mean that the driver will work on LE.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
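
For context, every hunk above applies the same pattern: the NX coprocessor writes its status block in big-endian, so the affected fields are annotated __be32/__be64 and converted with be32_to_cpu()/be64_to_cpu() before being used as ordinary integers, which is what silences the endianness warnings. Below is a minimal sketch of that pattern; the struct and function names (example_csb, example_account_bytes) are hypothetical and not part of the driver.

```c
#include <linux/types.h>
#include <linux/atomic.h>
#include <asm/byteorder.h>

/* Hypothetical status block: fields are written big-endian by hardware. */
struct example_csb {
	__be32 processed_byte_count;
	__be64 address;
};

static inline s64 example_account_bytes(const struct example_csb *csb,
					 atomic64_t *total)
{
	/* Convert to CPU endianness before doing arithmetic on the value. */
	u32 nbytes = be32_to_cpu(csb->processed_byte_count);

	atomic64_add(nbytes, total);
	return atomic64_read(total);
}
```

Without the __be32 annotation and the be32_to_cpu() call, the raw big-endian value would be added directly to the byte counter, which is what the original code did and what sparse flags on little-endian builds.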