git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blobdiff - crypto/shash.c
crypto: hmac - require that the underlying hash algorithm is unkeyed
[mirror_ubuntu-artful-kernel.git] / crypto / shash.c
index a051541a4a1718c996ba7a7b678b5b9e5e857488..5c9e4b6c6d1520dfb2ad119d0ba634396716180a 100644 (file)
 #include <linux/seq_file.h>
 #include <linux/cryptouser.h>
 #include <net/netlink.h>
+#include <linux/compiler.h>
 
 #include "internal.h"
 
 static const struct crypto_type crypto_shash_type;
 
-static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
-                          unsigned int keylen)
+int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
+                   unsigned int keylen)
 {
        return -ENOSYS;
 }
+EXPORT_SYMBOL_GPL(shash_no_setkey);
 
 static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
@@ -67,7 +69,7 @@ EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
 {
-       typedef u8 __attribute__ ((aligned)) u8_aligned;
+       typedef u8 __aligned_largest u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
 }
 
@@ -80,7 +82,7 @@ static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
-               __attribute__ ((aligned));
+               __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;
 
@@ -116,7 +118,7 @@ static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
-               __attribute__ ((aligned));
+               __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;
 
@@ -274,12 +276,14 @@ static int shash_async_finup(struct ahash_request *req)
 
 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
 {
-       struct scatterlist *sg = req->src;
-       unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
+       struct scatterlist *sg;
+       unsigned int offset;
        int err;
 
-       if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
+       if (nbytes &&
+           (sg = req->src, offset = sg->offset,
+            nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;
 
                data = kmap_atomic(sg_page(sg));
@@ -403,7 +407,7 @@ static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
 #endif
 
 static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
-       __attribute__ ((unused));
+       __maybe_unused;
 static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
 {
        struct shash_alg *salg = __crypto_shash_alg(alg);