The second parameter 'DestMax' of StrCpyS() should be the number of
unicode characters, not the size in bytes.
Also, the code is modified to keep it aligned with the version in
IntelFrameworkPkg\Library\FrameworkUefiLib\UefiLibPrint.c.
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Hao Wu <hao.a.wu@intel.com>
Reviewed-by: Liming Gao <liming.gao@intel.com>
git-svn-id: https://svn.code.sf.net/p/edk2/code/trunk/edk2@18160 6f19259b-4bc3-4df7-8a09-765794883524
SizeRequired = sizeof(CHAR16) + (CharactersRequired * sizeof(CHAR16));\r
}\r
\r
- BufferToReturn = AllocateZeroPool(SizeRequired);\r
+ BufferToReturn = AllocatePool(SizeRequired);\r
\r
if (BufferToReturn == NULL) {\r
return NULL;\r
+ } else {\r
+ BufferToReturn[0] = L'\0';\r
}\r
- \r
+\r
if (String != NULL) {\r
- StrCpyS(BufferToReturn, SizeRequired, String);\r
+ StrCpyS(BufferToReturn, SizeRequired / sizeof(CHAR16), String);\r
}\r
\r
UnicodeVSPrint(BufferToReturn + StrLen(BufferToReturn), (CharactersRequired+1) * sizeof(CHAR16), FormatString, Marker);\r