I'm using stb_truetype
to render TrueType fonts in an OpenGL context.
Is there any simple way to pre-determine the height and width of a string in the font before it is rendered?
The LWJGL STB TrueType demo has included an implementation of this (with kerning) since August 2017:
// Requires LWJGL's static imports: org.lwjgl.stb.STBTruetype.* and org.lwjgl.system.MemoryStack.stackPush.
private float getStringWidth(STBTTFontinfo info, String text, int from, int to, int fontHeight) {
    int width = 0;

    try (MemoryStack stack = stackPush()) {
        IntBuffer pCodePoint       = stack.mallocInt(1);
        IntBuffer pAdvancedWidth   = stack.mallocInt(1);
        IntBuffer pLeftSideBearing = stack.mallocInt(1);

        int i = from;
        while (i < to) {
            // Decode the next code point (handles surrogate pairs) and advance past it.
            i += getCP(text, to, i, pCodePoint);
            int cp = pCodePoint.get(0);

            // Unscaled horizontal advance for this glyph, in font units.
            stbtt_GetCodepointHMetrics(info, cp, pAdvancedWidth, pLeftSideBearing);
            width += pAdvancedWidth.get(0);

            // isKerningEnabled() is a toggle in the demo.
            if (isKerningEnabled() && i < to) {
                // Peek at the next code point (without advancing) to apply the kern pair.
                getCP(text, to, i, pCodePoint);
                width += stbtt_GetCodepointKernAdvance(info, cp, pCodePoint.get(0));
            }
        }
    }

    // Convert from font units to pixels for the requested pixel height.
    return width * stbtt_ScaleForPixelHeight(info, fontHeight);
}
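Called like this, with the font assumed to be already loaded into an STBTTFontinfo (the values here are illustrative):

    float w = getStringWidth(info, "Hello, world!", 0, 13, 24); // width in pixels at a 24 px font height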
// Reads one code point from text starting at index i; returns the number of chars consumed (1 or 2).
private static int getCP(String text, int to, int i, IntBuffer cpOut) {
    char c1 = text.charAt(i);
    if (Character.isHighSurrogate(c1) && i + 1 < to) {
        char c2 = text.charAt(i + 1);
        if (Character.isLowSurrogate(c2)) {
            // Surrogate pair: combine into a single code point and consume two chars.
            cpOut.put(0, Character.toCodePoint(c1, c2));
            return 2;
        }
    }
    cpOut.put(0, c1);
    return 1;
}
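The height half of the question is simpler: for a single line, the height is just the pixel height you pass to stbtt_ScaleForPixelHeight. If you instead want the font's full vertical extent (ascent to descent), you can scale the font-wide vertical metrics. A minimal sketch in the same style, assuming the same static imports as above (getFontHeight is my name, not part of the demo):

    private float getFontHeight(STBTTFontinfo info, int fontHeight) {
        try (MemoryStack stack = stackPush()) {
            IntBuffer pAscent  = stack.mallocInt(1);
            IntBuffer pDescent = stack.mallocInt(1);
            IntBuffer pLineGap = stack.mallocInt(1);

            // Font-wide vertical metrics, unscaled (font units). descent is negative.
            stbtt_GetFontVMetrics(info, pAscent, pDescent, pLineGap);

            float scale = stbtt_ScaleForPixelHeight(info, fontHeight);
            return (pAscent.get(0) - pDescent.get(0)) * scale;
        }
    }

For multi-line text, add pLineGap.get(0) * scale once per line break to get the baseline-to-baseline spacing.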