
Why am I getting \u0000 instead of characters when using String.fromCharCode?


I could not find this particular problem addressed.

The goal is to decode a Caesar cipher. My code works to the point of putting the correct letter codes into an array. I can manually turn that array into the correct string like this:

String.fromCharCode(89, 79, 85, 32, 68, 73, 68, 32, 73, 84, 33);

But when I try to turn the array into a string like this:

  return String.fromCharCode(arr.join(", "));

it returns \u0000, which I gather is the Unicode null character.

Can anybody explain what's going on?

Here's my complete code:

function rot13(str) {
  var arr = [];
  for (var i = 0; i < str.length; i++){
    if (str.charCodeAt(i) > 77 && str.charCodeAt(i) < 91){
      arr.push(str.charCodeAt(i) - 13);
    } else if (str.charCodeAt(i) <=77 && str.charCodeAt(i) > 64) {
      arr.push(str.charCodeAt(i) + 13);
    } else {
      arr.push(str.charCodeAt(i));
    }
  }
  console.log(arr);
  return String.fromCharCode(arr.join(", "));
}

rot13("LBH QVQ VG!");
String.fromCharCode(89, 79, 85, 32, 68, 73, 68, 32, 73, 84, 33);


Solution

  • arr.join(", ") produces a single string ("89, 79, 85, …"), not a list of arguments, so String.fromCharCode receives one argument. Coercing that string to a number gives NaN, which maps to code point 0, hence the null character "\u0000". You either need Function.prototype.apply (.apply(null, arr)) or, if you have ES6 available, the spread operator (see the short demonstration after the solution code):

    return String.fromCharCode(...arr);
    

    or

    return String.fromCharCode.apply(null, arr);
    

    function rot13(str) {
      var arr = [];
      for (var i = 0; i < str.length; i++){
        if (str.charCodeAt(i) > 77 && str.charCodeAt(i) < 91){
          // N–Z (codes 78–90): rotate back by 13
          arr.push(str.charCodeAt(i) - 13);
        } else if (str.charCodeAt(i) <= 77 && str.charCodeAt(i) > 64) {
          // A–M (codes 65–77): rotate forward by 13
          arr.push(str.charCodeAt(i) + 13);
        } else {
          // Anything else (spaces, punctuation) passes through unchanged
          arr.push(str.charCodeAt(i));
        }
      }
    
      // Pass each code in the array as a separate argument
      return String.fromCharCode.apply(null, arr);
    }
    
    console.log(rot13("LBH QVQ VG!"));
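
    To see why the original call produced "\u0000": String.fromCharCode coerces each argument to a number, and the joined string coerces to NaN, which maps to code point 0. A minimal sketch, using the character codes from the question:

    // The joined string is a single argument; Number("89, 79, 85") is NaN,
    // and NaN maps to code point 0, the null character "\u0000".
    String.fromCharCode([89, 79, 85].join(", "));    // "\u0000"
    
    // Spreading (or .apply) passes each code as its own argument.
    String.fromCharCode(...[89, 79, 85]);             // "YOU"
    String.fromCharCode.apply(null, [89, 79, 85]);    // "YOU"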