console: Fix rendering of VGA underline

vga_putcharxy()'s underline code sets font_data to 0xffff instead of
0xff.  It then reads dmask16[0xffff >> 4] and dmask4[0xffff >> 6].
In practice, these out-of-bounds subscripts "only" put a few stray
bits into the display surface.

For 32-bit pixels, there's no array access.  font_data's extra bits go
straight into the display surface.
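
A similar illustrative sketch for the 32bpp path, using the -(bit) mask
trick from the hunks below (xorcol is just a made-up foreground-xor-
background value, not taken from the patch):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t xorcol = 0x00FFFFFF;      /* hypothetical fg ^ bg pixel value */
        unsigned buggy = 0xFFFF, fixed = 0xFF;

        /* fixed >> 7 is 0 or 1, so -(...) is a clean all-zeros/all-ones mask */
        printf("fixed: %08x\n", (uint32_t)-(fixed >> 7) & xorcol);  /* 00ffffff */
        /* buggy >> 7 is 0x1FF; -(0x1FF) == 0xFFFFFE01, smearing junk bits */
        printf("buggy: %08x\n", (uint32_t)-(buggy >> 7) & xorcol);  /* 00fffe01 */
        return 0;
    }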

Broken since commit 6d6f7c28 implemented underline.

Spotted by Coverity.

Signed-off-by: Markus Armbruster <armbru@redhat.com>
Signed-off-by: Anthony Liguori <aliguori@us.ibm.com>
diff --git a/console.c b/console.c
index e43de92..f6fe441 100644
--- a/console.c
+++ b/console.c
@@ -467,7 +467,7 @@
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask16[(font_data >> 4)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask16[(font_data >> 0) & 0xf] & xorcol) ^ bgcol;
@@ -480,7 +480,7 @@
             font_data = *font_ptr++;
             if (t_attrib->uline
                 && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (dmask4[(font_data >> 6)] & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (dmask4[(font_data >> 4) & 3] & xorcol) ^ bgcol;
@@ -493,7 +493,7 @@
         for(i = 0; i < FONT_HEIGHT; i++) {
             font_data = *font_ptr++;
             if (t_attrib->uline && ((i == FONT_HEIGHT - 2) || (i == FONT_HEIGHT - 3))) {
-                font_data = 0xFFFF;
+                font_data = 0xFF;
             }
             ((uint32_t *)d)[0] = (-((font_data >> 7)) & xorcol) ^ bgcol;
             ((uint32_t *)d)[1] = (-((font_data >> 6) & 1) & xorcol) ^ bgcol;