ceph/src/rgw/rgw_crypt.cc
1// -*- mode:C; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
2// vim: ts=8 sw=2 smarttab
3/**
4 * Crypto filters for Put/Post/Get operations.
5 */
6#include <rgw/rgw_op.h>
7#include <rgw/rgw_crypt.h>
8#include <auth/Crypto.h>
9#include <rgw/rgw_b64.h>
10#include <rgw/rgw_rest_s3.h>
11#include "include/assert.h"
12#include <boost/utility/string_ref.hpp>
13#include <rgw/rgw_keystone.h>
14#include "include/str_map.h"
15#include "crypto/crypto_accel.h"
16#include "crypto/crypto_plugin.h"
17#ifdef USE_NSS
18# include <nspr.h>
19# include <nss.h>
20# include <pk11pub.h>
21#endif
22
23#ifdef USE_CRYPTOPP
24#include <cryptopp/cryptlib.h>
25#include <cryptopp/modes.h>
26#include <cryptopp/aes.h>
27using namespace CryptoPP;
28#endif
29
30#define dout_context g_ceph_context
31#define dout_subsys ceph_subsys_rgw
32
33using namespace rgw;
34
35/**
 36 * Encryption in CTR mode. The IV (counter) for each block is derived from the stream offset.
37 */
38#warning "TODO: move this code to auth/Crypto for others to reuse."
39
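// Example of the counter derivation in prepare_iv() below: for
// stream_offset = 4096 the block index is 4096 / 16 = 256 (0x100); added
// big-endian to the static IV "aes256iv_ctr1337" this changes only the two
// least significant bytes, 0x33 0x37 ('3' '7') -> 0x34 0x37.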
40class AES_256_CTR : public BlockCrypt {
41public:
42 static const size_t AES_256_KEYSIZE = 256 / 8;
43 static const size_t AES_256_IVSIZE = 128 / 8;
44private:
45 static const uint8_t IV[AES_256_IVSIZE];
46 CephContext* cct;
47 uint8_t key[AES_256_KEYSIZE];
48public:
49 AES_256_CTR(CephContext* cct): cct(cct) {
50 }
51 ~AES_256_CTR() {
52 memset(key, 0, AES_256_KEYSIZE);
53 }
54 bool set_key(const uint8_t* _key, size_t key_size) {
55 if (key_size != AES_256_KEYSIZE) {
56 return false;
57 }
58 memcpy(key, _key, AES_256_KEYSIZE);
59 return true;
60 }
61 size_t get_block_size() {
62 return AES_256_IVSIZE;
63 }
64
65#ifdef USE_CRYPTOPP
66
67 bool encrypt(bufferlist& input, off_t in_ofs, size_t size, bufferlist& output, off_t stream_offset) {
68 byte iv[AES_256_IVSIZE];
69 ldout(cct, 25)
70 << "Encrypt in_ofs " << in_ofs
71 << " size=" << size
72 << " stream_offset=" << stream_offset
73 << dendl;
74 if (input.length() < in_ofs + size) {
75 return false;
76 }
77
78 output.clear();
79 buffer::ptr buf((size + AES_256_KEYSIZE - 1) / AES_256_KEYSIZE * AES_256_KEYSIZE);
80 /*create CTR mask*/
81 prepare_iv(iv, stream_offset);
82 CTR_Mode< AES >::Encryption e;
83 e.SetKeyWithIV(key, AES_256_KEYSIZE, iv, AES_256_IVSIZE);
84 buf.zero();
85 e.ProcessData((byte*)buf.c_str(), (byte*)buf.c_str(), buf.length());
86 buf.set_length(size);
87 off_t plaintext_pos = in_ofs;
88 off_t crypt_pos = 0;
89 auto iter = input.buffers().begin();
 90 //skip the unaffected beginning of the input
91 while ((iter != input.buffers().end()) && (plaintext_pos >= iter->length())) {
92 plaintext_pos -= iter->length();
93 ++iter;
94 }
95 while (iter != input.buffers().end()) {
96 off_t cnt = std::min<off_t>(iter->length() - plaintext_pos, size - crypt_pos);
97 byte* src = (byte*)iter->c_str() + plaintext_pos;
98 byte* dst = (byte*)buf.c_str() + crypt_pos;
99 for (off_t i = 0; i < cnt; i++) {
100 dst[i] ^= src[i];
101 }
102 ++iter;
103 plaintext_pos = 0;
104 crypt_pos += cnt;
105 }
106 output.append(buf);
107 return true;
108 }
109
110#elif defined(USE_NSS)
111
112 bool encrypt(bufferlist& input, off_t in_ofs, size_t size, bufferlist& output, off_t stream_offset)
113 {
114 bool result = false;
115 PK11SlotInfo *slot;
116 SECItem keyItem;
117 PK11SymKey *symkey;
118 CK_AES_CTR_PARAMS ctr_params = {0};
119 SECItem ivItem;
120 SECItem *param;
121 SECStatus ret;
122 PK11Context *ectx;
123 int written;
124 unsigned int written2;
125
126 slot = PK11_GetBestSlot(CKM_AES_CTR, NULL);
127 if (slot) {
128 keyItem.type = siBuffer;
129 keyItem.data = key;
130 keyItem.len = AES_256_KEYSIZE;
131
132 symkey = PK11_ImportSymKey(slot, CKM_AES_CTR, PK11_OriginUnwrap, CKA_UNWRAP, &keyItem, NULL);
133 if (symkey) {
134 static_assert(sizeof(ctr_params.cb) >= AES_256_IVSIZE, "Must fit counter");
135 ctr_params.ulCounterBits = 128;
136 prepare_iv(reinterpret_cast<unsigned char*>(&ctr_params.cb), stream_offset);
137
138 ivItem.type = siBuffer;
139 ivItem.data = (unsigned char*)&ctr_params;
140 ivItem.len = sizeof(ctr_params);
141
142 param = PK11_ParamFromIV(CKM_AES_CTR, &ivItem);
143 if (param) {
144 ectx = PK11_CreateContextBySymKey(CKM_AES_CTR, CKA_ENCRYPT, symkey, param);
145 if (ectx) {
146 buffer::ptr buf((size + AES_256_KEYSIZE - 1) / AES_256_KEYSIZE * AES_256_KEYSIZE);
147 ret = PK11_CipherOp(ectx,
148 (unsigned char*)buf.c_str(), &written, buf.length(),
149 (unsigned char*)input.c_str() + in_ofs, size);
150 if (ret == SECSuccess) {
151 ret = PK11_DigestFinal(ectx,
152 (unsigned char*)buf.c_str() + written, &written2,
153 buf.length() - written);
154 if (ret == SECSuccess) {
155 buf.set_length(written + written2);
156 output.append(buf);
157 result = true;
158 }
159 }
160 PK11_DestroyContext(ectx, PR_TRUE);
161 }
162 SECITEM_FreeItem(param, PR_TRUE);
163 }
164 PK11_FreeSymKey(symkey);
165 }
166 PK11_FreeSlot(slot);
167 }
168 if (result == false) {
169 ldout(cct, 5) << "Failed to perform AES-CTR encryption: " << PR_GetError() << dendl;
170 }
171 return result;
172 }
173
174#else
175#error Must define USE_CRYPTOPP or USE_NSS
176#endif
 177 /* in CTR mode, encryption and decryption are the same operation */
178 bool decrypt(bufferlist& input, off_t in_ofs, size_t size, bufferlist& output, off_t stream_offset) {
179 return encrypt(input, in_ofs, size, output, stream_offset);
180 }
181
182 void prepare_iv(byte iv[AES_256_IVSIZE], off_t offset) {
183 off_t index = offset / AES_256_IVSIZE;
184 off_t i = AES_256_IVSIZE - 1;
185 unsigned int val;
186 unsigned int carry = 0;
187 while (i>=0) {
188 val = (index & 0xff) + IV[i] + carry;
189 iv[i] = val;
190 carry = val >> 8;
191 index = index >> 8;
192 i--;
193 }
194 }
195};
196
197const uint8_t AES_256_CTR::IV[AES_256_CTR::AES_256_IVSIZE] =
198 { 'a', 'e', 's', '2', '5', '6', 'i', 'v', '_', 'c', 't', 'r', '1', '3', '3', '7' };
199
200
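// Looks up the crypto accelerator plugin named by the plugin_crypto_accelerator
// option and returns a factory-created instance, or nullptr if the plugin
// cannot be loaded or constructed.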
201CryptoAccelRef get_crypto_accel(CephContext *cct)
202{
203 CryptoAccelRef ca_impl = nullptr;
204 stringstream ss;
205 PluginRegistry *reg = cct->get_plugin_registry();
206 string crypto_accel_type = cct->_conf->plugin_crypto_accelerator;
207
208 CryptoPlugin *factory = dynamic_cast<CryptoPlugin*>(reg->get_with_load("crypto", crypto_accel_type));
209 if (factory == nullptr) {
210 lderr(cct) << __func__ << " cannot load crypto accelerator of type " << crypto_accel_type << dendl;
211 return nullptr;
212 }
213 int err = factory->factory(&ca_impl, &ss);
214 if (err) {
 215 lderr(cct) << __func__ << " factory returned error " << err <<
216 " with description: " << ss.str() << dendl;
217 }
218 return ca_impl;
219}
220
221
222/**
 223 * Encryption in CBC mode. Chunked to 4K blocks; the IV for each 4K chunk is derived from the stream offset.
224 *
225 *
226 *
227 * A. Encryption
228 * 1. Input is split to 4K chunks + remainder in one, smaller chunk
229 * 2. Each full chunk is encrypted separately with CBC chained mode, with initial IV derived from offset
230 * 3. Last chunk is 16*m + n.
231 * 4. 16*m bytes are encrypted with CBC chained mode, with initial IV derived from offset
232 * 5. Last n bytes are xor-ed with pattern obtained by CBC encryption of
 233 * the last encrypted 16-byte block [16*m-16, 16*m) with IV = {0}.
234 * 6. (Special case) If m == 0 then last n bytes are xor-ed with pattern
235 * obtained by CBC encryption of {0} with IV derived from offset
236 *
237 * B. Decryption
238 * 1. Input is split to 4K chunks + remainder in one, smaller chunk
239 * 2. Each full chunk is decrypted separately with CBC chained mode, with initial IV derived from offset
240 * 3. Last chunk is 16*m + n.
241 * 4. 16*m bytes are decrypted with CBC chained mode, with initial IV derived from offset
242 * 5. Last n bytes are xor-ed with pattern obtained by CBC ENCRYPTION of
 243 * the last (still encrypted) 16-byte block [16*m-16, 16*m) with IV = {0}
244 * 6. (Special case) If m == 0 then last n bytes are xor-ed with pattern
245 * obtained by CBC ENCRYPTION of {0} with IV derived from offset
246 */
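/*
 * Example (illustrative numbers only): a 10100-byte stream is processed as two
 * 4096-byte chunks plus a 1908-byte tail chunk.  The tail is 16*m + n with
 * m = 119 and n = 4: the first 1904 bytes are CBC-encrypted with the IV
 * derived from their offset, and the last 4 bytes are xor-ed with the CBC
 * encryption (IV = {0}) of the preceding 16-byte ciphertext block.
 */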
247#warning "TODO: use auth/Crypto instead of reimplementing."
248class AES_256_CBC : public BlockCrypt {
249public:
250 static const size_t AES_256_KEYSIZE = 256 / 8;
251 static const size_t AES_256_IVSIZE = 128 / 8;
252 static const size_t CHUNK_SIZE = 4096;
253private:
254 static const uint8_t IV[AES_256_IVSIZE];
255 CephContext* cct;
256 uint8_t key[AES_256_KEYSIZE];
257public:
258 AES_256_CBC(CephContext* cct): cct(cct) {
259 }
260 ~AES_256_CBC() {
261 memset(key, 0, AES_256_KEYSIZE);
262 }
263 bool set_key(const uint8_t* _key, size_t key_size) {
264 if (key_size != AES_256_KEYSIZE) {
265 return false;
266 }
267 memcpy(key, _key, AES_256_KEYSIZE);
268 return true;
269 }
270 size_t get_block_size() {
271 return CHUNK_SIZE;
272 }
273
274#ifdef USE_CRYPTOPP
275
276 bool cbc_transform(unsigned char* out,
277 const unsigned char* in,
278 size_t size,
279 const unsigned char (&iv)[AES_256_IVSIZE],
280 const unsigned char (&key)[AES_256_KEYSIZE],
281 bool encrypt)
282 {
283 if (encrypt) {
284 CBC_Mode< AES >::Encryption e;
285 e.SetKeyWithIV(key, AES_256_KEYSIZE, iv, AES_256_IVSIZE);
286 e.ProcessData((byte*)out, (byte*)in, size);
287 } else {
288 CBC_Mode< AES >::Decryption d;
289 d.SetKeyWithIV(key, AES_256_KEYSIZE, iv, AES_256_IVSIZE);
290 d.ProcessData((byte*)out, (byte*)in, size);
291 }
292 return true;
293 }
294
295#elif defined(USE_NSS)
296
297 bool cbc_transform(unsigned char* out,
298 const unsigned char* in,
299 size_t size,
300 const unsigned char (&iv)[AES_256_IVSIZE],
301 const unsigned char (&key)[AES_256_KEYSIZE],
302 bool encrypt)
303 {
304 bool result = false;
305 PK11SlotInfo *slot;
306 SECItem keyItem;
307 PK11SymKey *symkey;
308 CK_AES_CBC_ENCRYPT_DATA_PARAMS ctr_params = {0};
309 SECItem ivItem;
310 SECItem *param;
311 SECStatus ret;
312 PK11Context *ectx;
313 int written;
314
315 slot = PK11_GetBestSlot(CKM_AES_CBC, NULL);
316 if (slot) {
317 keyItem.type = siBuffer;
318 keyItem.data = const_cast<unsigned char*>(&key[0]);
319 keyItem.len = AES_256_KEYSIZE;
320 symkey = PK11_ImportSymKey(slot, CKM_AES_CBC, PK11_OriginUnwrap, CKA_UNWRAP, &keyItem, NULL);
321 if (symkey) {
322 memcpy(ctr_params.iv, iv, AES_256_IVSIZE);
323 ivItem.type = siBuffer;
324 ivItem.data = (unsigned char*)&ctr_params;
325 ivItem.len = sizeof(ctr_params);
326
327 param = PK11_ParamFromIV(CKM_AES_CBC, &ivItem);
328 if (param) {
329 ectx = PK11_CreateContextBySymKey(CKM_AES_CBC, encrypt?CKA_ENCRYPT:CKA_DECRYPT, symkey, param);
330 if (ectx) {
331 ret = PK11_CipherOp(ectx,
332 out, &written, size,
333 in, size);
334 if ((ret == SECSuccess) && (written == (int)size)) {
335 result = true;
336 }
337 PK11_DestroyContext(ectx, PR_TRUE);
338 }
339 SECITEM_FreeItem(param, PR_TRUE);
340 }
341 PK11_FreeSymKey(symkey);
342 }
343 PK11_FreeSlot(slot);
344 }
345 if (result == false) {
346 ldout(cct, 5) << "Failed to perform AES-CBC encryption: " << PR_GetError() << dendl;
347 }
348 return result;
349 }
350
351#else
352#error Must define USE_CRYPTOPP or USE_NSS
353#endif
354
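  /* Chunked front end: splits [0, size) into CHUNK_SIZE pieces, derives a
   * fresh IV for each piece from the stream offset, and uses the hardware
   * crypto accelerator when one can be loaded, falling back to the software
   * cbc_transform() above otherwise. */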
355 bool cbc_transform(unsigned char* out,
356 const unsigned char* in,
357 size_t size,
358 off_t stream_offset,
359 const unsigned char (&key)[AES_256_KEYSIZE],
360 bool encrypt)
361 {
362 static std::atomic<bool> failed_to_get_crypto(false);
363 CryptoAccelRef crypto_accel;
364 if (! failed_to_get_crypto.load())
365 {
366 crypto_accel = get_crypto_accel(cct);
367 if (!crypto_accel)
368 failed_to_get_crypto = true;
369 }
370 bool result = true;
371 unsigned char iv[AES_256_IVSIZE];
372 for (size_t offset = 0; result && (offset < size); offset += CHUNK_SIZE) {
373 size_t process_size = offset + CHUNK_SIZE <= size ? CHUNK_SIZE : size - offset;
374 prepare_iv(iv, stream_offset + offset);
375 if (crypto_accel != nullptr) {
376 if (encrypt) {
377 result = crypto_accel->cbc_encrypt(out + offset, in + offset,
378 process_size, iv, key);
379 } else {
380 result = crypto_accel->cbc_decrypt(out + offset, in + offset,
381 process_size, iv, key);
382 }
383 } else {
384 result = cbc_transform(
385 out + offset, in + offset, process_size,
386 iv, key, encrypt);
387 }
388 }
389 return result;
390 }
391
392
393 bool encrypt(bufferlist& input,
394 off_t in_ofs,
395 size_t size,
396 bufferlist& output,
397 off_t stream_offset)
398 {
399 bool result = false;
400 size_t aligned_size = size / AES_256_IVSIZE * AES_256_IVSIZE;
401 size_t unaligned_rest_size = size - aligned_size;
402 output.clear();
403 buffer::ptr buf(aligned_size + AES_256_IVSIZE);
404 unsigned char* buf_raw = reinterpret_cast<unsigned char*>(buf.c_str());
405 const unsigned char* input_raw = reinterpret_cast<const unsigned char*>(input.c_str());
406
407 /* encrypt main bulk of data */
408 result = cbc_transform(buf_raw,
409 input_raw + in_ofs,
410 aligned_size,
411 stream_offset, key, true);
412 if (result && (unaligned_rest_size > 0)) {
413 /* remainder to encrypt */
414 if (aligned_size % CHUNK_SIZE > 0) {
415 /* use last chunk for unaligned part */
416 unsigned char iv[AES_256_IVSIZE] = {0};
417 result = cbc_transform(buf_raw + aligned_size,
418 buf_raw + aligned_size - AES_256_IVSIZE,
419 AES_256_IVSIZE,
420 iv, key, true);
421 } else {
422 /* 0 full blocks in current chunk, use IV as base for unaligned part */
423 unsigned char iv[AES_256_IVSIZE] = {0};
424 unsigned char data[AES_256_IVSIZE];
425 prepare_iv(data, stream_offset + aligned_size);
426 result = cbc_transform(buf_raw + aligned_size,
427 data,
428 AES_256_IVSIZE,
429 iv, key, true);
430 }
431 if (result) {
432 for(size_t i = aligned_size; i < size; i++) {
433 *(buf_raw + i) ^= *(input_raw + in_ofs + i);
434 }
435 }
436 }
437 if (result) {
438 ldout(cct, 25) << "Encrypted " << size << " bytes"<< dendl;
439 buf.set_length(size);
440 output.append(buf);
441 } else {
442 ldout(cct, 5) << "Failed to encrypt" << dendl;
443 }
444 return result;
445 }
446
447
448 bool decrypt(bufferlist& input,
449 off_t in_ofs,
450 size_t size,
451 bufferlist& output,
452 off_t stream_offset)
453 {
454 bool result = false;
455 size_t aligned_size = size / AES_256_IVSIZE * AES_256_IVSIZE;
456 size_t unaligned_rest_size = size - aligned_size;
457 output.clear();
458 buffer::ptr buf(aligned_size + AES_256_IVSIZE);
459 unsigned char* buf_raw = reinterpret_cast<unsigned char*>(buf.c_str());
460 unsigned char* input_raw = reinterpret_cast<unsigned char*>(input.c_str());
461
462 /* decrypt main bulk of data */
463 result = cbc_transform(buf_raw,
464 input_raw + in_ofs,
465 aligned_size,
466 stream_offset, key, false);
467 if (result && unaligned_rest_size > 0) {
468 /* remainder to decrypt */
469 if (aligned_size % CHUNK_SIZE > 0) {
470 /*use last chunk for unaligned part*/
471 unsigned char iv[AES_256_IVSIZE] = {0};
472 result = cbc_transform(buf_raw + aligned_size,
473 input_raw + in_ofs + aligned_size - AES_256_IVSIZE,
474 AES_256_IVSIZE,
475 iv, key, true);
476 } else {
477 /* 0 full blocks in current chunk, use IV as base for unaligned part */
478 unsigned char iv[AES_256_IVSIZE] = {0};
479 unsigned char data[AES_256_IVSIZE];
480 prepare_iv(data, stream_offset + aligned_size);
481 result = cbc_transform(buf_raw + aligned_size,
482 data,
483 AES_256_IVSIZE,
484 iv, key, true);
485 }
486 if (result) {
487 for(size_t i = aligned_size; i < size; i++) {
488 *(buf_raw + i) ^= *(input_raw + in_ofs + i);
489 }
490 }
491 }
492 if (result) {
493 ldout(cct, 25) << "Decrypted " << size << " bytes"<< dendl;
494 buf.set_length(size);
495 output.append(buf);
496 } else {
497 ldout(cct, 5) << "Failed to decrypt" << dendl;
498 }
499 return result;
500 }
501
502
503 void prepare_iv(byte (&iv)[AES_256_IVSIZE], off_t offset) {
504 off_t index = offset / AES_256_IVSIZE;
505 off_t i = AES_256_IVSIZE - 1;
506 unsigned int val;
507 unsigned int carry = 0;
508 while (i>=0) {
509 val = (index & 0xff) + IV[i] + carry;
510 iv[i] = val;
511 carry = val >> 8;
512 index = index >> 8;
513 i--;
514 }
515 }
516};
517
518
519std::unique_ptr<BlockCrypt> AES_256_CBC_create(CephContext* cct, const uint8_t* key, size_t len)
520{
521 auto cbc = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(cct));
522 cbc->set_key(key, AES_256_KEYSIZE);
523 return std::move(cbc);
524}
525
526
527const uint8_t AES_256_CBC::IV[AES_256_CBC::AES_256_IVSIZE] =
528 { 'a', 'e', 's', '2', '5', '6', 'i', 'v', '_', 'c', 't', 'r', '1', '3', '3', '7' };
529
530
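/* One-shot AES-256-ECB helper; used below to derive per-object keys by
 * encrypting a random key selector under a master (KMS or default) key. */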
531#ifdef USE_CRYPTOPP
532
533bool AES_256_ECB_encrypt(CephContext* cct,
534 const uint8_t* key,
535 size_t key_size,
536 const uint8_t* data_in,
537 uint8_t* data_out,
538 size_t data_size)
539{
540 bool res = false;
541 if (key_size == AES_256_KEYSIZE) {
542 try {
543 ECB_Mode< AES >::Encryption e;
544 e.SetKey( key, key_size );
545 e.ProcessData(data_out, data_in, data_size);
546 res = true;
547 } catch( CryptoPP::Exception& ex ) {
548 ldout(cct, 5) << "AES-ECB encryption failed with: " << ex.GetWhat() << dendl;
549 }
550 }
551 return res;
552}
553
554#elif defined USE_NSS
555
556bool AES_256_ECB_encrypt(CephContext* cct,
557 const uint8_t* key,
558 size_t key_size,
559 const uint8_t* data_in,
560 uint8_t* data_out,
561 size_t data_size) {
562 bool result = false;
563 PK11SlotInfo *slot;
564 SECItem keyItem;
565 PK11SymKey *symkey;
566 SECItem *param;
567 SECStatus ret;
568 PK11Context *ectx;
569 int written;
570 unsigned int written2;
571 if (key_size == AES_256_KEYSIZE) {
572 slot = PK11_GetBestSlot(CKM_AES_ECB, NULL);
573 if (slot) {
574 keyItem.type = siBuffer;
575 keyItem.data = const_cast<uint8_t*>(key);
576 keyItem.len = AES_256_KEYSIZE;
577
578 param = PK11_ParamFromIV(CKM_AES_ECB, NULL);
579 if (param) {
580 symkey = PK11_ImportSymKey(slot, CKM_AES_ECB, PK11_OriginUnwrap, CKA_UNWRAP, &keyItem, NULL);
581 if (symkey) {
582 ectx = PK11_CreateContextBySymKey(CKM_AES_ECB, CKA_ENCRYPT, symkey, param);
583 if (ectx) {
584 ret = PK11_CipherOp(ectx,
585 data_out, &written, data_size,
586 data_in, data_size);
587 if (ret == SECSuccess) {
588 ret = PK11_DigestFinal(ectx,
589 data_out + written, &written2,
590 data_size - written);
591 if (ret == SECSuccess) {
592 result = true;
593 }
594 }
595 PK11_DestroyContext(ectx, PR_TRUE);
596 }
597 PK11_FreeSymKey(symkey);
598 }
599 SECITEM_FreeItem(param, PR_TRUE);
600 }
601 PK11_FreeSlot(slot);
602 }
603 if (result == false) {
604 ldout(cct, 5) << "Failed to perform AES-ECB encryption: " << PR_GetError() << dendl;
605 }
606 } else {
607 ldout(cct, 5) << "Key size must be 256 bits long" << dendl;
608 }
609 return result;
610}
611
612#else
613#error Must define USE_CRYPTOPP or USE_NSS
614#endif
615
616
617RGWGetObj_BlockDecrypt::RGWGetObj_BlockDecrypt(CephContext* cct,
618 RGWGetDataCB* next,
619 std::unique_ptr<BlockCrypt> crypt):
620 RGWGetObj_Filter(next),
621 cct(cct),
622 crypt(std::move(crypt)),
623 enc_begin_skip(0),
624 ofs(0),
625 end(0),
626 cache()
627{
628 block_size = this->crypt->get_block_size();
629}
630
631RGWGetObj_BlockDecrypt::~RGWGetObj_BlockDecrypt() {
632}
633
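/* Rebuilds the per-part plaintext sizes from the object manifest (a new part
 * starts at stripe 0); parts_len lets fixup_range() and handle_data() restart
 * the encryption offset at every multipart part boundary. */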
634int RGWGetObj_BlockDecrypt::read_manifest(bufferlist& manifest_bl) {
635 parts_len.clear();
636 RGWObjManifest manifest;
637 if (manifest_bl.length()) {
638 bufferlist::iterator miter = manifest_bl.begin();
639 try {
640 ::decode(manifest, miter);
641 } catch (buffer::error& err) {
642 ldout(cct, 0) << "ERROR: couldn't decode manifest" << dendl;
643 return -EIO;
644 }
645 RGWObjManifest::obj_iterator mi;
646 for (mi = manifest.obj_begin(); mi != manifest.obj_end(); ++mi) {
647 if (mi.get_cur_stripe() == 0) {
648 parts_len.push_back(0);
649 }
650 parts_len.back() += mi.get_stripe_size();
651 }
652 if (cct->_conf->subsys.should_gather(ceph_subsys_rgw, 20)) {
653 for (size_t i = 0; i<parts_len.size(); i++) {
654 ldout(cct, 20) << "Manifest part " << i << ", size=" << parts_len[i] << dendl;
655 }
656 }
657 }
658 return 0;
659}
660
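/* Widens the client-requested byte range to block boundaries (within the
 * current multipart part when a manifest is known) so that whole blocks can be
 * fetched and decrypted; enc_begin_skip/ofs/end remember the original bounds
 * for trimming the decrypted output. */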
661int RGWGetObj_BlockDecrypt::fixup_range(off_t& bl_ofs, off_t& bl_end) {
662 off_t inp_ofs = bl_ofs;
663 off_t inp_end = bl_end;
664 if (parts_len.size() > 0) {
665 off_t in_ofs = bl_ofs;
666 off_t in_end = bl_end;
667
668 size_t i = 0;
669 while (i<parts_len.size() && (in_ofs > (off_t)parts_len[i])) {
670 in_ofs -= parts_len[i];
671 i++;
672 }
673 //in_ofs is inside block i
674 size_t j = 0;
675 while (j<parts_len.size() && (in_end > (off_t)parts_len[j])) {
676 in_end -= parts_len[j];
677 j++;
678 }
679 //in_end is inside block j
680
681 size_t rounded_end;
682 rounded_end = ( in_end & ~(block_size - 1) ) + (block_size - 1);
683 if (rounded_end + 1 >= parts_len[j]) {
684 rounded_end = parts_len[j] - 1;
685 }
686
687 enc_begin_skip = in_ofs & (block_size - 1);
688 ofs = bl_ofs - enc_begin_skip;
689 end = bl_end;
690 bl_ofs = bl_ofs - enc_begin_skip;
691 bl_end += rounded_end - in_end;
692 }
693 else
694 {
695 enc_begin_skip = bl_ofs & (block_size - 1);
696 ofs = bl_ofs & ~(block_size - 1);
697 end = bl_end;
698 bl_ofs = bl_ofs & ~(block_size - 1);
699 bl_end = ( bl_end & ~(block_size - 1) ) + (block_size - 1);
700 }
701 ldout(cct, 20) << "fixup_range [" << inp_ofs << "," << inp_end
702 << "] => [" << bl_ofs << "," << bl_end << "]" << dendl;
703 return 0;
704}
705
706
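/* Streaming decryption: partial blocks are buffered in `cache` until a full
 * block_size chunk is available, which is then decrypted at the offset of the
 * current part and trimmed to the requested range before being forwarded. */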
707int RGWGetObj_BlockDecrypt::handle_data(bufferlist& bl, off_t bl_ofs, off_t bl_len) {
708 int res = 0;
709 ldout(cct, 25) << "Decrypt " << bl_len << " bytes" << dendl;
710 size_t part_ofs = ofs;
711 size_t i = 0;
712 while (i<parts_len.size() && (part_ofs >= parts_len[i])) {
713 part_ofs -= parts_len[i];
714 i++;
715 }
716 if (cache.length() > 0) {
 717 //top up the cache to a full block from the incoming data
718 off_t append_size = block_size - cache.length();
719 if (append_size > bl_len)
720 append_size = bl_len;
721 char *src = bl.get_contiguous(bl_ofs, append_size);
722 cache.append(src,append_size);
723 bl_ofs += append_size;
724 bl_len -= append_size;
725
726 if (cache.length() == block_size) {
727 bufferlist data;
728 if (! crypt->decrypt(cache, 0, block_size, data, part_ofs) ) {
729 return -ERR_INTERNAL_ERROR;
730 }
731 part_ofs += block_size;
732
733 off_t send_size = block_size - enc_begin_skip;
734 if (ofs + enc_begin_skip + send_size > end + 1) {
735 send_size = end + 1 - ofs - enc_begin_skip;
736 }
737 res = next->handle_data(data, enc_begin_skip, send_size);
738
739 enc_begin_skip = 0;
740 cache.clear();
741 ofs += block_size;
742 if (res != 0)
743 return res;
744 }
745 }
746 if (bl_len > 0) {
747 off_t aligned_size = bl_len & ~(block_size - 1);
748 //save remainder
749 off_t remainder = bl_len - aligned_size;
750 if(remainder > 0) {
751 bl.copy(bl_ofs + aligned_size, remainder, cache);
752 }
753 if (aligned_size > 0) {
754 bufferlist data;
755 if (! crypt->decrypt(bl, bl_ofs, aligned_size, data, part_ofs) ) {
756 return -ERR_INTERNAL_ERROR;
757 }
758 part_ofs += aligned_size;
759 off_t send_size = aligned_size - enc_begin_skip;
760 if (ofs + enc_begin_skip + send_size > end + 1) {
761 send_size = end + 1 - ofs - enc_begin_skip;
762 }
763 res = next->handle_data(data, enc_begin_skip, send_size);
764 enc_begin_skip = 0;
765 ofs += aligned_size;
766 if (res != 0)
767 return res;
768 }
769 }
770 return 0;
771}
772
773/**
774 * flush remainder of data to output
775 */
776int RGWGetObj_BlockDecrypt::flush() {
777 int res = 0;
778 size_t part_ofs = ofs;
779 size_t i = 0;
780 while (i<parts_len.size() && (part_ofs > parts_len[i])) {
781 part_ofs -= parts_len[i];
782 i++;
783 }
784 if (cache.length() > 0) {
785 bufferlist data;
786 if (! crypt->decrypt(cache, 0, cache.length(), data, part_ofs) ) {
787 return -ERR_INTERNAL_ERROR;
788 }
789 off_t send_size = cache.length() - enc_begin_skip;
790 if (ofs + enc_begin_skip + send_size > end + 1) {
791 send_size = end + 1 - ofs - enc_begin_skip;
792 }
793 res = next->handle_data(data, enc_begin_skip, send_size);
794 enc_begin_skip = 0;
795 ofs += send_size;
796 }
797 return res;
798}
799
800RGWPutObj_BlockEncrypt::RGWPutObj_BlockEncrypt(CephContext* cct,
801 RGWPutObjDataProcessor* next,
802 std::unique_ptr<BlockCrypt> crypt):
803 RGWPutObj_Filter(next),
804 cct(cct),
805 crypt(std::move(crypt)),
806 ofs(0),
807 cache()
808{
809 block_size = this->crypt->get_block_size();
810}
811
812RGWPutObj_BlockEncrypt::~RGWPutObj_BlockEncrypt() {
813}
814
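/* Streaming encryption: data is accumulated in `cache` until a full block_size
 * chunk can be encrypted and forwarded; a zero-length input flushes the
 * remainder and replicates the 0-sized handle_data() call downstream. */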
815int RGWPutObj_BlockEncrypt::handle_data(bufferlist& bl,
816 off_t in_ofs,
817 void **phandle,
818 rgw_raw_obj *pobj,
819 bool *again) {
820 int res = 0;
821 ldout(cct, 25) << "Encrypt " << bl.length() << " bytes" << dendl;
822
823 if (*again) {
824 bufferlist no_data;
825 res = next->handle_data(no_data, in_ofs, phandle, pobj, again);
 826 //if *again is not reset to false we would loop forever,
 827 //so note it in the log
828 if (*again) {
829 ldout(cct, 20) << "*again==true" << dendl;
830 }
831 return res;
832 }
833 off_t bl_ofs = 0;
834 if (cache.length() > 0) {
 835 //top up the cache to a full block from the incoming data
836 off_t size = block_size - cache.length();
837 if (size > bl.length())
838 size = bl.length();
839 if (size > 0) {
840 char *src = bl.get_contiguous(0, size);
841 cache.append(src,size);
842 bl_ofs += size;
843 }
844 if (cache.length() == block_size) {
845 bufferlist data;
846 if (! crypt->encrypt(cache, 0, block_size, data, ofs)) {
847 return -ERR_INTERNAL_ERROR;
848 }
849 res = next->handle_data(data, ofs, phandle, pobj, again);
850 cache.clear();
851 ofs += block_size;
852 if (res != 0)
853 return res;
854 }
855 }
856 if (bl_ofs < bl.length()) {
857 off_t aligned_size = (bl.length() - bl_ofs) & ~(block_size - 1);
858 //save remainder
859 off_t remainder = (bl.length() - bl_ofs) - aligned_size;
860 if(remainder > 0) {
861 bl.copy(bl_ofs + aligned_size, remainder, cache);
862 }
863 if (aligned_size > 0) {
864 bufferlist data;
865 if (! crypt->encrypt(bl, bl_ofs, aligned_size, data, ofs) ) {
866 return -ERR_INTERNAL_ERROR;
867 }
868 res=next->handle_data(data, ofs, phandle, pobj, again);
869 ofs += aligned_size;
870 if (res != 0)
871 return res;
872 }
873 }
874 if (bl.length() == 0) {
875 if (cache.length() > 0) {
876 /*flush cached data*/
877 bufferlist data;
878 if (! crypt->encrypt(cache, 0, cache.length(), data, ofs) ) {
879 return -ERR_INTERNAL_ERROR;
880 }
881 res = next->handle_data(data, ofs, phandle, pobj, again);
882 ofs += cache.length();
883 cache.clear();
884 if (res != 0)
885 return res;
886 }
887 /*replicate 0-sized handle_data*/
888 res = next->handle_data(cache, ofs, phandle, pobj, again);
889 }
890 return res;
891}
892
893int RGWPutObj_BlockEncrypt::throttle_data(void *handle,
894 const rgw_raw_obj& obj,
895 uint64_t size,
896 bool need_to_wait) {
897 return next->throttle_data(handle, obj, size, need_to_wait);
898}
899
900std::string create_random_key_selector(CephContext * const cct) {
901 char random[AES_256_KEYSIZE];
902 if (get_random_bytes(&random[0], sizeof(random)) != 0) {
903 ldout(cct, 0) << "ERROR: cannot get_random_bytes. " << dendl;
904 for (char& v:random) v=rand();
905 }
906 return std::string(random, sizeof(random));
907}
908
909static int get_barbican_url(CephContext * const cct,
910 std::string& url)
911{
912 url = cct->_conf->rgw_barbican_url;
913 if (url.empty()) {
914 ldout(cct, 0) << "ERROR: conf rgw_barbican_url is not set" << dendl;
915 return -EINVAL;
916 }
917
918 if (url.back() != '/') {
919 url.append("/");
920 }
921
922 return 0;
923}
924
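/* Fetches the secret for key_id from Barbican's v1 secrets API using the
 * supplied Keystone token; only a 2xx response carrying exactly 256 bits of
 * key material is accepted. */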
925static int request_key_from_barbican(CephContext *cct,
926 boost::string_ref key_id,
927 boost::string_ref key_selector,
928 const std::string& barbican_token,
929 std::string& actual_key) {
930 std::string secret_url;
931 int res;
932 res = get_barbican_url(cct, secret_url);
933 if (res < 0) {
934 return res;
935 }
936 secret_url += "v1/secrets/" + std::string(key_id);
937
938 bufferlist secret_bl;
939 RGWHTTPTransceiver secret_req(cct, &secret_bl);
940 secret_req.append_header("Accept", "application/octet-stream");
941 secret_req.append_header("X-Auth-Token", barbican_token);
942
943 res = secret_req.process("GET", secret_url.c_str());
944 if (res < 0) {
945 return res;
946 }
947 if (secret_req.get_http_status() ==
948 RGWHTTPTransceiver::HTTP_STATUS_UNAUTHORIZED) {
949 return -EACCES;
950 }
951
952 if (secret_req.get_http_status() >=200 &&
953 secret_req.get_http_status() < 300 &&
954 secret_bl.length() == AES_256_KEYSIZE) {
955 actual_key.assign(secret_bl.c_str(), secret_bl.length());
956 memset(secret_bl.c_str(), 0, secret_bl.length());
957 } else {
958 res = -EACCES;
959 }
960 return res;
961}
962
963static map<string,string> get_str_map(const string &str) {
964 map<string,string> m;
965 get_str_map(str, &m, ";, \t");
966 return m;
967}
968
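/* Resolves a KMS key id to the actual object key.  If the id is listed in
 * rgw_crypt_s3_kms_encryption_keys (entries of the form
 * "<key-id>=<base64 256-bit master key>" separated by ";, \t" -
 * e.g. "testkey-1=...", where the name is only illustrative), the object key
 * is the AES-256-ECB encryption of the random key selector under that master
 * key; otherwise the key is fetched from Barbican. */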
969static int get_actual_key_from_kms(CephContext *cct,
970 boost::string_ref key_id,
971 boost::string_ref key_selector,
972 std::string& actual_key)
973{
974 int res = 0;
975 ldout(cct, 20) << "Getting KMS encryption key for key=" << key_id << dendl;
976 static map<string,string> str_map = get_str_map(
977 cct->_conf->rgw_crypt_s3_kms_encryption_keys);
978
979 map<string, string>::iterator it = str_map.find(std::string(key_id));
980 if (it != str_map.end() ) {
981 std::string master_key = from_base64((*it).second);
982 if (master_key.length() == AES_256_KEYSIZE) {
983 uint8_t _actual_key[AES_256_KEYSIZE];
984 if (AES_256_ECB_encrypt(cct,
985 reinterpret_cast<const uint8_t*>(master_key.c_str()), AES_256_KEYSIZE,
986 reinterpret_cast<const uint8_t*>(key_selector.data()),
987 _actual_key, AES_256_KEYSIZE)) {
988 actual_key = std::string((char*)&_actual_key[0], AES_256_KEYSIZE);
989 } else {
990 res = -EIO;
991 }
992 memset(_actual_key, 0, sizeof(_actual_key));
993 } else {
994 ldout(cct, 20) << "Wrong size for key=" << key_id << dendl;
995 res = -EIO;
996 }
997 } else {
998 std::string token;
999 if (rgw::keystone::Service::get_keystone_barbican_token(cct, token) < 0) {
1000 ldout(cct, 5) << "Failed to retrieve token for barbican" << dendl;
1001 res = -EINVAL;
1002 return res;
1003 }
1004
1005 res = request_key_from_barbican(cct, key_id, key_selector, token, actual_key);
1006 if (res != 0) {
1007 ldout(cct, 5) << "Failed to retrieve secret from barbican:" << key_id << dendl;
1008 }
1009 }
1010 return res;
1011}
1012
1013static inline void set_attr(map<string, bufferlist>& attrs,
1014 const char* key,
1015 boost::string_ref value)
1016{
1017 bufferlist bl;
1018 bl.append(value.data(), value.size());
1019 attrs[key] = std::move(bl);
1020}
1021
1022static inline std::string get_str_attribute(map<string, bufferlist>& attrs,
1023 const char *name)
1024{
1025 auto iter = attrs.find(name);
1026 if (iter == attrs.end()) {
1027 return {};
1028 }
1029 return iter->second.to_str();
1030}
1031
1032typedef enum {
1033 X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM=0,
1034 X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY,
1035 X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY_MD5,
1036 X_AMZ_SERVER_SIDE_ENCRYPTION,
1037 X_AMZ_SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID,
1038 X_AMZ_SERVER_SIDE_ENCRYPTION_LAST
1039} crypt_option_e;
1040
1041typedef struct {
1042 const char* http_header_name;
1043 const std::string post_part_name;
1044} crypt_option_names;
1045
1046static const crypt_option_names crypt_options[] = {
1047 {"HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM", "x-amz-server-side-encryption-customer-algorithm"},
1048 {"HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY", "x-amz-server-side-encryption-customer-key"},
1049 {"HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY_MD5", "x-amz-server-side-encryption-customer-key-md5"},
1050 {"HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION", "x-amz-server-side-encryption"},
1051 {"HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID", "x-amz-server-side-encryption-aws-kms-key-id"},
1052};
1053
1054static boost::string_ref get_crypt_attribute(
1055 RGWEnv* env,
1056 std::map<std::string,
1057 RGWPostObj_ObjStore::post_form_part,
1058 const ltstr_nocase>* parts,
1059 crypt_option_e option)
1060{
1061 static_assert(
1062 X_AMZ_SERVER_SIDE_ENCRYPTION_LAST == sizeof(crypt_options)/sizeof(*crypt_options),
1063 "Missing items in crypt_options");
1064 if (parts != nullptr) {
1065 auto iter
1066 = parts->find(crypt_options[option].post_part_name);
1067 if (iter == parts->end())
1068 return boost::string_ref();
1069 bufferlist& data = iter->second.data;
1070 boost::string_ref str = boost::string_ref(data.c_str(), data.length());
1071 return rgw_trim_whitespace(str);
1072 } else {
1073 const char* hdr = env->get(crypt_options[option].http_header_name, nullptr);
1074 if (hdr != nullptr) {
1075 return boost::string_ref(hdr);
1076 } else {
1077 return boost::string_ref();
1078 }
1079 }
1080}
1081
1082
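/* Chooses the server-side encryption mode for an upload: SSE-C (customer key
 * supplied in the request and verified against its MD5), SSE-KMS (key resolved
 * via get_actual_key_from_kms()), or RGW-AUTO when
 * rgw_crypt_default_encryption_key is configured.  The chosen mode and key
 * material references are stored in the object attrs, and the headers to echo
 * back to the client are returned in crypt_http_responses. */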
1083int rgw_s3_prepare_encrypt(struct req_state* s,
1084 std::map<std::string, ceph::bufferlist>& attrs,
1085 std::map<std::string,
1086 RGWPostObj_ObjStore::post_form_part,
1087 const ltstr_nocase>* parts,
1088 std::unique_ptr<BlockCrypt>* block_crypt,
1089 std::map<std::string, std::string>& crypt_http_responses)
1090{
1091 int res = 0;
1092 crypt_http_responses.clear();
1093 {
1094 boost::string_ref req_sse_ca =
1095 get_crypt_attribute(s->info.env, parts, X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM);
1096 if (! req_sse_ca.empty()) {
1097 if (req_sse_ca != "AES256") {
1098 return -ERR_INVALID_REQUEST;
1099 }
1100 if (s->cct->_conf->rgw_crypt_require_ssl &&
1101 !s->info.env->exists("SERVER_PORT_SECURE")) {
1102 return -ERR_INVALID_REQUEST;
1103 }
1104 std::string key_bin = from_base64(
1105 get_crypt_attribute(s->info.env, parts, X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY) );
1106 if (key_bin.size() != AES_256_CBC::AES_256_KEYSIZE) {
1107 return -ERR_INVALID_REQUEST;
1108 }
1109 boost::string_ref keymd5 =
1110 get_crypt_attribute(s->info.env, parts, X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY_MD5);
1111 std::string keymd5_bin = from_base64(keymd5);
1112 if (keymd5_bin.size() != CEPH_CRYPTO_MD5_DIGESTSIZE) {
1113 return -ERR_INVALID_DIGEST;
1114 }
1115 MD5 key_hash;
1116 byte key_hash_res[CEPH_CRYPTO_MD5_DIGESTSIZE];
1117 key_hash.Update(reinterpret_cast<const byte*>(key_bin.c_str()), key_bin.size());
1118 key_hash.Final(key_hash_res);
1119
1120 if (memcmp(key_hash_res, keymd5_bin.c_str(), CEPH_CRYPTO_MD5_DIGESTSIZE) != 0) {
1121 return -ERR_INVALID_DIGEST;
1122 }
1123
1124 set_attr(attrs, RGW_ATTR_CRYPT_MODE, "SSE-C-AES256");
1125 set_attr(attrs, RGW_ATTR_CRYPT_KEYMD5, keymd5_bin);
1126
1127 if (block_crypt) {
1128 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1129 aes->set_key(reinterpret_cast<const uint8_t*>(key_bin.c_str()), AES_256_KEYSIZE);
1130 *block_crypt = std::move(aes);
1131 }
1132
1133 crypt_http_responses["x-amz-server-side-encryption-customer-algorithm"] = "AES256";
1134 crypt_http_responses["x-amz-server-side-encryption-customer-key-MD5"] = keymd5.to_string();
1135 return 0;
1136 }
 1137 /* Amazon server-side encryption with KMS (key management service) */
1138 boost::string_ref req_sse =
1139 get_crypt_attribute(s->info.env, parts, X_AMZ_SERVER_SIDE_ENCRYPTION);
1140 if (! req_sse.empty()) {
1141 if (req_sse != "aws:kms") {
1142 return -ERR_INVALID_REQUEST;
1143 }
1144 if (s->cct->_conf->rgw_crypt_require_ssl &&
1145 !s->info.env->exists("SERVER_PORT_SECURE")) {
1146 return -ERR_INVALID_REQUEST;
1147 }
1148 boost::string_ref key_id =
1149 get_crypt_attribute(s->info.env, parts, X_AMZ_SERVER_SIDE_ENCRYPTION_AWS_KMS_KEY_ID);
1150 if (key_id.empty()) {
1151 return -ERR_INVALID_ACCESS_KEY;
1152 }
1153 /* try to retrieve actual key */
1154 std::string key_selector = create_random_key_selector(s->cct);
1155 std::string actual_key;
1156 res = get_actual_key_from_kms(s->cct, key_id, key_selector, actual_key);
1157 if (res != 0)
1158 return res;
1159 if (actual_key.size() != AES_256_KEYSIZE) {
1160 ldout(s->cct, 5) << "ERROR: key obtained from key_id:" <<
 1161 key_id << " is not 256 bits" << dendl;
1162 return -ERR_INVALID_ACCESS_KEY;
1163 }
1164 set_attr(attrs, RGW_ATTR_CRYPT_MODE, "SSE-KMS");
1165 set_attr(attrs, RGW_ATTR_CRYPT_KEYID, key_id);
1166 set_attr(attrs, RGW_ATTR_CRYPT_KEYSEL, key_selector);
1167
1168 if (block_crypt) {
1169 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1170 aes->set_key(reinterpret_cast<const uint8_t*>(actual_key.c_str()), AES_256_KEYSIZE);
1171 *block_crypt = std::move(aes);
1172 }
1173 actual_key.replace(0, actual_key.length(), actual_key.length(), '\000');
1174 return 0;
1175 }
1176
1177 /* no other encryption mode, check if default encryption is selected */
1178 if (s->cct->_conf->rgw_crypt_default_encryption_key != "") {
1179 std::string master_encryption_key =
1180 from_base64(s->cct->_conf->rgw_crypt_default_encryption_key);
1181 if (master_encryption_key.size() != 256 / 8) {
1182 ldout(s->cct, 0) << "ERROR: failed to decode 'rgw crypt default encryption key' to 256 bit string" << dendl;
1183 /* not an error to return; missing encryption does not inhibit processing */
1184 return 0;
1185 }
1186
1187 set_attr(attrs, RGW_ATTR_CRYPT_MODE, "RGW-AUTO");
1188 std::string key_selector = create_random_key_selector(s->cct);
1189 set_attr(attrs, RGW_ATTR_CRYPT_KEYSEL, key_selector);
1190
1191 uint8_t actual_key[AES_256_KEYSIZE];
1192 if (AES_256_ECB_encrypt(s->cct,
1193 reinterpret_cast<const uint8_t*>(master_encryption_key.c_str()), AES_256_KEYSIZE,
1194 reinterpret_cast<const uint8_t*>(key_selector.c_str()),
1195 actual_key, AES_256_KEYSIZE) != true) {
1196 memset(actual_key, 0, sizeof(actual_key));
1197 return -EIO;
1198 }
1199 if (block_crypt) {
1200 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1201 aes->set_key(reinterpret_cast<const uint8_t*>(actual_key), AES_256_KEYSIZE);
1202 *block_crypt = std::move(aes);
1203 }
1204 memset(actual_key, 0, sizeof(actual_key));
1205 return 0;
1206 }
1207 }
1208 /*no encryption*/
1209 return 0;
1210}
1211
1212
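/* Recreates the BlockCrypt for a download from the stored RGW_ATTR_CRYPT_MODE:
 * SSE-C re-validates the customer-supplied key against the stored MD5,
 * SSE-KMS resolves the key again from the stored key id and key selector, and
 * RGW-AUTO re-derives it from the configured default encryption key. */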
1213int rgw_s3_prepare_decrypt(struct req_state* s,
1214 map<string, bufferlist>& attrs,
1215 std::unique_ptr<BlockCrypt>* block_crypt,
1216 std::map<std::string, std::string>& crypt_http_responses)
1217{
1218 int res = 0;
1219 std::string stored_mode = get_str_attribute(attrs, RGW_ATTR_CRYPT_MODE);
1220 ldout(s->cct, 15) << "Encryption mode: " << stored_mode << dendl;
1221 if (stored_mode == "SSE-C-AES256") {
1222 if (s->cct->_conf->rgw_crypt_require_ssl &&
1223 !s->info.env->exists("SERVER_PORT_SECURE")) {
1224 return -ERR_INVALID_REQUEST;
1225 }
1226 const char *req_cust_alg =
1227 s->info.env->get("HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_ALGORITHM", NULL);
1228
1229 if ((nullptr == req_cust_alg) || (strcmp(req_cust_alg, "AES256") != 0)) {
1230 return -ERR_INVALID_REQUEST;
1231 }
1232
1233 std::string key_bin =
1234 from_base64(s->info.env->get("HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY", ""));
1235 if (key_bin.size() != AES_256_CBC::AES_256_KEYSIZE) {
1236 return -ERR_INVALID_REQUEST;
1237 }
1238
1239 std::string keymd5 =
1240 s->info.env->get("HTTP_X_AMZ_SERVER_SIDE_ENCRYPTION_CUSTOMER_KEY_MD5", "");
1241 std::string keymd5_bin = from_base64(keymd5);
1242 if (keymd5_bin.size() != CEPH_CRYPTO_MD5_DIGESTSIZE) {
1243 return -ERR_INVALID_DIGEST;
1244 }
1245
1246 MD5 key_hash;
1247 uint8_t key_hash_res[CEPH_CRYPTO_MD5_DIGESTSIZE];
1248 key_hash.Update(reinterpret_cast<const byte*>(key_bin.c_str()), key_bin.size());
1249 key_hash.Final(key_hash_res);
1250
1251 if ((memcmp(key_hash_res, keymd5_bin.c_str(), CEPH_CRYPTO_MD5_DIGESTSIZE) != 0) ||
1252 (get_str_attribute(attrs, RGW_ATTR_CRYPT_KEYMD5) != keymd5_bin)) {
1253 return -ERR_INVALID_DIGEST;
1254 }
1255 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1256 aes->set_key(reinterpret_cast<const uint8_t*>(key_bin.c_str()), AES_256_CBC::AES_256_KEYSIZE);
1257 if (block_crypt) *block_crypt = std::move(aes);
1258
1259 crypt_http_responses["x-amz-server-side-encryption-customer-algorithm"] = "AES256";
1260 crypt_http_responses["x-amz-server-side-encryption-customer-key-MD5"] = keymd5;
1261 return 0;
1262 }
1263
1264 if (stored_mode == "SSE-KMS") {
1265 if (s->cct->_conf->rgw_crypt_require_ssl &&
1266 !s->info.env->exists("SERVER_PORT_SECURE")) {
1267 return -ERR_INVALID_REQUEST;
1268 }
1269 /* try to retrieve actual key */
1270 std::string key_id = get_str_attribute(attrs, RGW_ATTR_CRYPT_KEYID);
1271 std::string key_selector = get_str_attribute(attrs, RGW_ATTR_CRYPT_KEYSEL);
1272 std::string actual_key;
1273 res = get_actual_key_from_kms(s->cct, key_id, key_selector, actual_key);
1274 if (res != 0) {
1275 ldout(s->cct, 10) << "No encryption key for key-id=" << key_id << dendl;
1276 return res;
1277 }
1278 if (actual_key.size() != AES_256_KEYSIZE) {
1279 ldout(s->cct, 0) << "ERROR: key obtained from key_id:" <<
 1280 key_id << " is not 256 bits" << dendl;
1281 return -ERR_INVALID_ACCESS_KEY;
1282 }
1283
1284 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1285 aes->set_key(reinterpret_cast<const uint8_t*>(actual_key.c_str()), AES_256_KEYSIZE);
1286 actual_key.replace(0, actual_key.length(), actual_key.length(), '\000');
1287 if (block_crypt) *block_crypt = std::move(aes);
1288
1289 crypt_http_responses["x-amz-server-side-encryption"] = "aws:kms";
1290 crypt_http_responses["x-amz-server-side-encryption-aws-kms-key-id"] = key_id;
1291 return 0;
1292 }
1293
1294 if (stored_mode == "RGW-AUTO") {
1295 std::string master_encryption_key =
1296 from_base64(std::string(s->cct->_conf->rgw_crypt_default_encryption_key));
1297 if (master_encryption_key.size() != 256 / 8) {
1298 ldout(s->cct, 0) << "ERROR: failed to decode 'rgw crypt default encryption key' to 256 bit string" << dendl;
1299 return -EIO;
1300 }
1301 std::string attr_key_selector = get_str_attribute(attrs, RGW_ATTR_CRYPT_KEYSEL);
1302 if (attr_key_selector.size() != AES_256_CBC::AES_256_KEYSIZE) {
1303 ldout(s->cct, 0) << "ERROR: missing or invalid " RGW_ATTR_CRYPT_KEYSEL << dendl;
1304 return -EIO;
1305 }
1306 uint8_t actual_key[AES_256_KEYSIZE];
1307 if (AES_256_ECB_encrypt(s->cct,
1308 reinterpret_cast<const uint8_t*>(master_encryption_key.c_str()),
1309 AES_256_KEYSIZE,
1310 reinterpret_cast<const uint8_t*>(attr_key_selector.c_str()),
1311 actual_key, AES_256_KEYSIZE) != true) {
1312 memset(actual_key, 0, sizeof(actual_key));
1313 return -EIO;
1314 }
1315 auto aes = std::unique_ptr<AES_256_CBC>(new AES_256_CBC(s->cct));
1316 aes->set_key(actual_key, AES_256_KEYSIZE);
1317 memset(actual_key, 0, sizeof(actual_key));
1318 if (block_crypt) *block_crypt = std::move(aes);
1319 return 0;
1320 }
1321 /*no decryption*/
1322 return 0;
1323}