Index: Objects/unicodeobject.c
===================================================================
--- Objects/unicodeobject.c	(revision 78318)
+++ Objects/unicodeobject.c	(working copy)
@@ -8170,6 +8170,7 @@
            size_t buflen,
            PyObject *v)
 {
+    PyObject *s;
     /* presume that the buffer is at least 2 characters long */
     if (PyUnicode_Check(v)) {
         if (PyUnicode_GET_SIZE(v) != 1)
@@ -8180,6 +8181,11 @@
     else if (PyString_Check(v)) {
         if (PyString_GET_SIZE(v) != 1)
             goto onError;
+        s = PyUnicode_FromString(PyString_AS_STRING(v));
+        if (s == NULL) {
+            return -1;
+        }
+        Py_DECREF(s);
         buf[0] = (Py_UNICODE)PyString_AS_STRING(v)[0];
     }
 
Index: Lib/test/test_unicode.py
===================================================================
--- Lib/test/test_unicode.py	(revision 78318)
+++ Lib/test/test_unicode.py	(working copy)
@@ -395,6 +395,19 @@
         self.assertEqual(u'%c' % 0x1234, u'\u1234')
         self.assertRaises(OverflowError, u"%c".__mod__, (sys.maxunicode+1,))
 
+        for num in range(0x00,0x80):
+            char = chr(num)
+            self.assertEqual(u"%c" % char, char)
+            self.assertEqual(u"%c" % num, char)
+        # Issue 7649
+        for num in range(0x80,0x100):
+            uchar = unichr(num)
+            self.assertEqual(uchar, u"%c" % num)    # works only with ints
+            self.assertEqual(uchar, u"%c" % uchar)  # and unicode chars
+            # the implicit decoding should fail for non-ascii chars
+            self.assertRaises(UnicodeDecodeError, u"%c".__mod__, chr(num))
+            self.assertRaises(UnicodeDecodeError, u"%s".__mod__, chr(num))
+
         # formatting jobs delegated from the string implementation:
         self.assertEqual('...%(foo)s...' % {'foo':u"abc"}, u'...abc...')
         self.assertEqual('...%(foo)s...' % {'foo':"abc"}, '...abc...')
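
For reference, a minimal sketch (not part of the patch) of the behaviour the
new test cases assert under Python 2 once the change is applied; the sample
ordinals 0x41 and 0xe9 are illustrative only:

    # %c with an int, or with a one-character unicode or ASCII byte string,
    # keeps working as before:
    assert u"%c" % 0x41 == u"A"
    assert u"%c" % "A" == u"A"
    assert u"%c" % 0xe9 == u"\xe9"
    assert u"%c" % u"\xe9" == u"\xe9"

    # %c with a non-ASCII byte string now raises UnicodeDecodeError, since
    # the byte string cannot be decoded implicitly (same as %s already does):
    try:
        u"%c" % "\xe9"
    except UnicodeDecodeError:
        pass
    else:
        raise AssertionError("expected UnicodeDecodeError")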