Fix `char &` arguments being non-bindable

This changes the caster to return a reference to a new local `CharT`
member of the type caster, so that binding lvalue-reference char
arguments works (currently such bindings fail to compile).

Fixes #1116
diff --git a/tests/test_builtin_casters.cpp b/tests/test_builtin_casters.cpp
index b73e96e..e5413c2 100644
--- a/tests/test_builtin_casters.cpp
+++ b/tests/test_builtin_casters.cpp
@@ -50,7 +50,9 @@
     // test_single_char_arguments
     m.attr("wchar_size") = py::cast(sizeof(wchar_t));
     m.def("ord_char", [](char c) -> int { return static_cast<unsigned char>(c); });
+    m.def("ord_char_lv", [](char &c) -> int { return static_cast<unsigned char>(c); });
     m.def("ord_char16", [](char16_t c) -> uint16_t { return c; });
+    m.def("ord_char16_lv", [](char16_t &c) -> uint16_t { return c; });
     m.def("ord_char32", [](char32_t c) -> uint32_t { return c; });
     m.def("ord_wchar", [](wchar_t c) -> int { return c; });
 
diff --git a/tests/test_builtin_casters.py b/tests/test_builtin_casters.py
index bc094a3..2f311f1 100644
--- a/tests/test_builtin_casters.py
+++ b/tests/test_builtin_casters.py
@@ -44,6 +44,7 @@
     toolong_message = "Expected a character, but multi-character string found"
 
     assert m.ord_char(u'a') == 0x61  # simple ASCII
+    assert m.ord_char_lv(u'b') == 0x62
     assert m.ord_char(u'é') == 0xE9  # requires 2 bytes in utf-8, but can be stuffed in a char
     with pytest.raises(ValueError) as excinfo:
         assert m.ord_char(u'Ā') == 0x100  # requires 2 bytes, doesn't fit in a char
@@ -54,9 +55,11 @@
 
     assert m.ord_char16(u'a') == 0x61
     assert m.ord_char16(u'é') == 0xE9
+    assert m.ord_char16_lv(u'ê') == 0xEA
     assert m.ord_char16(u'Ā') == 0x100
     assert m.ord_char16(u'‽') == 0x203d
     assert m.ord_char16(u'♥') == 0x2665
+    assert m.ord_char16_lv(u'♡') == 0x2661
     with pytest.raises(ValueError) as excinfo:
         assert m.ord_char16(u'🎂') == 0x1F382  # requires surrogate pair
     assert str(excinfo.value) == toobig_message(0x10000)