ascii2unicode() always terminates the u16 output string with '\0', so the
explicit zero termination in efi_str_to_u16() is redundant. Remove the
redundant assignment.
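As a worked illustration: for str = "ab", efi_str_to_u16() allocates
len = 3 u16 entries, and ascii2unicode() writes 'a', 'b' and the
terminating zero, i.e. it already fills out[len - 1].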
Add description for efi_str_to_u16() and ascii2unicode().
Signed-off-by: Heinrich Schuchardt <xypron.glpk@gmx.de>
Signed-off-by: Alexander Graf <agraf@suse.de>
#define EFI_DP_TYPE(_dp, _type, _subtype) \
	(((_dp)->type == DEVICE_PATH_TYPE_##_type) && \
	 ((_dp)->sub_type == DEVICE_PATH_SUB_TYPE_##_subtype))
-/* Convert strings from normal C strings to uEFI strings */
+/**
+ * ascii2unicode() - convert ASCII string to UTF-16 string
+ *
+ * A zero-terminated ASCII string is converted to a zero-terminated UTF-16
+ * string. The output buffer must be preallocated by the caller and large
+ * enough to hold the result, including the terminating zero.
+ *
+ * @unicode: preallocated output buffer for the UTF-16 string
+ * @ascii: ASCII string to be converted
+ */
static inline void ascii2unicode(u16 *unicode, const char *ascii)
{
	while (*ascii)
		*(unicode++) = *(ascii++);
	*unicode = 0;
}
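A minimal usage sketch (the caller example() is hypothetical, and the
buffer size is an assumption the caller must get right):

/* Hypothetical caller: the buffer must hold at least strlen(ascii) + 1
 * u16 entries; ascii2unicode() appends the terminating zero itself.
 */
void example(void)
{
	u16 buf[6];	/* "Hello" needs 5 entries plus the terminator */

	ascii2unicode(buf, "Hello");
}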
const efi_guid_t efi_guid_device_path_to_text_protocol =
EFI_DEVICE_PATH_TO_TEXT_PROTOCOL_GUID;
+/**
+ * efi_str_to_u16() - convert ASCII string to UTF-16
+ *
+ * A u16 buffer is allocated from pool and the zero-terminated ASCII string
+ * is copied into it.
+ *
+ * @str: ASCII string
+ * Return: UTF-16 string. NULL if out of memory.
+ */
static u16 *efi_str_to_u16(char *str)
{
	efi_uintn_t len;
	u16 *out;
	efi_status_t ret;

	len = strlen(str) + 1;
	ret = efi_allocate_pool(EFI_BOOT_SERVICES_DATA, len * sizeof(u16),
				(void **)&out);
	if (ret != EFI_SUCCESS)
		return NULL;
	ascii2unicode(out, str);
- out[len - 1] = 0;
return out;
}
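A usage sketch (the caller example2() is hypothetical); it assumes the
returned buffer is released with efi_free_pool(), the counterpart of
efi_allocate_pool():

/* Hypothetical caller: convert a label, use it, then return the buffer
 * to the pool. efi_free_pool() is assumed to be the matching release
 * function for efi_allocate_pool().
 */
static void example2(void)
{
	u16 *text = efi_str_to_u16("boot");

	if (!text)
		return;	/* allocation failed */
	/* ... use the UTF-16 string ... */
	efi_free_pool(text);
}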