inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->cra_alignmask;
 
+       /*
+        * To simplify the implementation, configure the skcipher walk to only
+        * give a partial block at the very end, never earlier.
+        */
+       inst->alg.chunksize = alg->cra_blocksize;
+
        inst->alg.ivsize = alg->cra_blocksize;
        inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;
 
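For context (not part of the diff): with chunksize set as above, the skcipher walk hands the mode implementation whole blocks on every step and at most one trailing partial block at the very end. A CFB encrypt path can then look roughly like the sketch below. This is an illustrative sketch, not the actual crypto/cfb.c code: the name cfb_encrypt_sketch and the 'cipher' parameter (the underlying single-block cipher handle normally kept in the tfm context) are placeholders, only the out-of-place case is shown (in-place needs a scratch block so the plaintext is not clobbered before the XOR), and the kernel definitions used come from <crypto/algapi.h> and <crypto/internal/skcipher.h>.

	static int cfb_encrypt_sketch(struct skcipher_request *req,
				      struct crypto_cipher *cipher)
	{
		const unsigned int bsize = crypto_cipher_blocksize(cipher);
		struct skcipher_walk walk;
		int err;

		err = skcipher_walk_virt(&walk, req, false);

		while (walk.nbytes >= bsize) {
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;
			u8 *iv = walk.iv;
			unsigned int nbytes = walk.nbytes;

			/* Full blocks: C_i = E_K(C_{i-1}) ^ P_i, chained via 'iv'. */
			do {
				crypto_cipher_encrypt_one(cipher, dst, iv);
				crypto_xor(dst, src, bsize);
				iv = dst;
				src += bsize;
				dst += bsize;
			} while ((nbytes -= bsize) >= bsize);

			memcpy(walk.iv, iv, bsize);
			err = skcipher_walk_done(&walk, nbytes);
		}

		if (walk.nbytes) {
			/* Final partial block: XOR with a truncated keystream block. */
			u8 ks[MAX_CIPHER_BLOCKSIZE];

			crypto_cipher_encrypt_one(cipher, ks, walk.iv);
			crypto_xor_cpy(walk.dst.virt.addr, ks, walk.src.virt.addr,
				       walk.nbytes);
			err = skcipher_walk_done(&walk, 0);
		}
		return err;
	}

Because the walk never yields a partial block before the final step, the trailing branch runs at most once and the full-block loop never has to stitch together a block that was split across two walk steps.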
                          "\x75\xa3\x85\x74\x1a\xb9\xce\xf8"
                          "\x20\x31\x62\x3d\x55\xb1\xe4\x71",
                .len    = 64,
+               .also_non_np = 1,
+               .np     = 2,
+               .tap    = { 31, 33 },
+       }, { /* > 16 bytes, not a multiple of 16 bytes */
+               .key    = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+                         "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+               .klen   = 16,
+               .iv     = "\x00\x01\x02\x03\x04\x05\x06\x07"
+                         "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+               .ptext  = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+                         "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+                         "\xae",
+               .ctext  = "\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20"
+                         "\x33\x34\x49\xf8\xe8\x3c\xfb\x4a"
+                         "\xc8",
+               .len    = 17,
+       }, { /* < 16 bytes */
+               .key    = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+                         "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+               .klen   = 16,
+               .iv     = "\x00\x01\x02\x03\x04\x05\x06\x07"
+                         "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+               .ptext  = "\x6b\xc1\xbe\xe2\x2e\x40\x9f",
+               .ctext  = "\x3b\x3f\xd9\x2e\xb7\x2d\xad",
+               .len    = 7,
        },
 };
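As a sanity check (outside the kernel, not part of the patch): since CFB simply truncates the last keystream block, the new 17-byte vector can be reproduced with any other AES-128-CFB128 implementation. Below is a minimal user-space program using OpenSSL's EVP API, assuming libcrypto is available; the file name check_cfb.c is only an example (build with: cc check_cfb.c -lcrypto).

	#include <stdio.h>
	#include <string.h>
	#include <openssl/evp.h>

	int main(void)
	{
		/* Key, IV, plaintext, and expected ciphertext from the 17-byte vector above. */
		static const unsigned char key[16] =
			"\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c";
		static const unsigned char iv[16] =
			"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f";
		static const unsigned char ptext[17] =
			"\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a\xae";
		static const unsigned char expect[17] =
			"\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20\x33\x34\x49\xf8\xe8\x3c\xfb\x4a\xc8";
		unsigned char out[32];
		int outl = 0, finl = 0;
		EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

		/* CFB128 is a stream-like mode, so a partial final block is allowed. */
		EVP_EncryptInit_ex(ctx, EVP_aes_128_cfb128(), NULL, key, iv);
		EVP_EncryptUpdate(ctx, out, &outl, ptext, sizeof(ptext));
		EVP_EncryptFinal_ex(ctx, out + outl, &finl);
		EVP_CIPHER_CTX_free(ctx);

		printf("%s\n", (outl + finl == 17 && !memcmp(out, expect, 17)) ?
		       "match" : "MISMATCH");
		return 0;
	}

The 7-byte vector can be checked the same way by shortening ptext/expect to the first 7 bytes.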