I'm building an application that can show text on the screen based on a JSON object that it gets from a TCP Socket. The JSON object contains information like text (base64 encoded), font, font size, and so on. I've implemented some of these things, but I'm stuck at changing the font. I have a folder named fonts
. The program looks for the font in this folder and if it finds it, it clears the character map and builds another one with the selected font file. The problem is that it fails to create the texture glCreateTextures(GL_TEXTURE_2D, 1, &texture);
and the program crashes.
This is the function that listens for the JSON message from the TCP socket:
void listen_for_connection(void * aArg) {
while (true) {
try {
asio::io_context io_context;
tcp::acceptor acceptor(io_context, tcp::endpoint(tcp::v4(), 8080));
tcp::socket socket(io_context);
acceptor.accept(socket);
for (;;) {
// JSON command
string message = read_(socket);
json command = json::parse(message);
//get the font
try {
cout << "Getting the font ..." << endl;
string font = command["font"].get < string > ();
cout << "Got the font: " << font << endl;
ifstream ifile;
ifile.open("./fonts/" + font + ".ttf");
cout << "Checking if the font file exists ..." << endl;
if (ifile) { // only change font if the font exists
cout << "Found the font file :)" << endl;
text_mutex.lock();
ifile.close();
font = "./fonts/" + font + ".ttf";
cout << "Got the mutex" << endl;
char c[font.size() + 1];
font.copy(c, font.size() + 1);
c[font.size()] = '\0';
cout << "Setting the font and executing GL Setup" << endl;
FONT = c;
initializeFreeType();
cout << "Done!" << endl;
text_mutex.unlock();
}
} catch (exception & e) {
cout << "Error while trying to get the font: " << e.what() << endl;
}
// get the text to be shown
try {
list < wstring > result = getTextFromCommand(command);
text_mutex.lock();
TEXT.clear();
for (wstring s: result) {
TEXT.push_back(s);
}
text_mutex.unlock();
} catch (exception & e) {
cout << "Error while trying to get the text: " << e.what() << endl;
}
// get the text size
try {
int font_size = command["font_size"].get < int > ();
text_mutex.lock();
TEXT_SCALE = ((float) font_size) / ((float) FONT_SIZE);
text_mutex.unlock();
} catch (exception & e) {
cout << "Error while trying to get the font size: " << e.what() << endl;
}
try {
int monitor = command["monitor"].get < int > ();
if (monitor < TOTAL_MONITORS && monitor >= 0 && monitor != MONITOR_TO_CHANGE) {
monitor_event_mutex.lock();
SHOULD_CHANGE_MONITOR = true;
MONITOR_TO_CHANGE = monitor;
monitor_event_mutex.unlock();
}
} catch (exception & e) {
cout << "Error while trying to get the monitor: " << e.what() << endl;
}
}
} catch (exception & e) {
cout << "Another exception has occurred: " << e.what() << endl;
}
}
}
Here is the initializeFreeType()
function:
void initializeFreeType() {
cout << "Initializing FreeType ..." << endl;
int error;
FT_Library ft;
error = FT_Init_FreeType( & ft);
cout << "Checking for errors ..." << endl;
if (error) {
printf("Error while trying to initialize freetype library: %d\n", error);
}
FT_Face face;
error = FT_New_Face(ft, FONT, 0, & face);
if (error == FT_Err_Unknown_File_Format) {
printf("Error: File format not supported\n");
exit(1);
} else if (error) {
printf("Error while trying to initialize face: %d\n", error);
}
cout << "Done setting the font " << endl;
FT_Set_Pixel_Sizes(face, 0, FONT_SIZE);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
if (!Characters.empty()) {
cout << "Clearing the Characters" << endl;
Characters.clear();
}
for (GLuint c = 0; c < 512; c++) {
FT_Load_Char(face, (wchar_t) c, FT_LOAD_RENDER);
GLuint texture = 0;
cout << "Creating textures..." << endl;
glCreateTextures(GL_TEXTURE_2D, 1, & texture);
cout << "Done creating textures" << endl;
cout << "Storage 2D" << endl;
glTextureStorage2D(texture, 1, GL_R8, face -> glyph -> bitmap.width, face -> glyph -> bitmap.rows);
cout << "Done Storage 2D" << endl;
glTextureSubImage2D(texture, 0, 0, 0, face -> glyph -> bitmap.width, face -> glyph -> bitmap.rows, GL_RED, GL_UNSIGNED_BYTE, face -> glyph -> bitmap.buffer);
cout << "Done SubImage 2D" << endl;
cout << "The rest ..." << endl;
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
cout << "Done with the rest :)" << endl;
Character character = {
texture,
glm::ivec2(face -> glyph -> bitmap.width, face -> glyph -> bitmap.rows),
glm::ivec2(face -> glyph -> bitmap_left, face -> glyph -> bitmap_top),
face -> glyph -> advance.x
};
cout << "Adding character ..." << endl;
Characters.insert(pair < wchar_t, Character > ((wchar_t) c, character));
cout << "Done adding character" << endl;
}
cout << "Done inserting the Characters :)" << endl;
FT_Done_Face(face);
cout << "Done face" << endl;
FT_Done_FreeType(ft);
cout << "Done freetype" << endl;
}
This is the glSetup()
function (gets executed at the beginning of the code, it has little importance, but I chose to write it here for completion):
void glSetup() {
glewInit();
glDebugMessageCallback(GLDebugMessageCallback, NULL);
glEnable(GL_CULL_FACE);
glEnable(GL_DEBUG_OUTPUT);
glViewport(0, 0, DEFAULT_MONITOR.maxResolution.width, DEFAULT_MONITOR.maxResolution.height);
GLuint shader = CompileShaders(true, false, false, false, true);
glUseProgram(shader);
initializeFreeType();
glm::mat4 projection = glm::ortho(0.0 f, (float) DEFAULT_MONITOR.maxResolution.width, 0.0 f, (float) DEFAULT_MONITOR.maxResolution.height);
glUniformMatrix4fv(1, 1, GL_FALSE, glm::value_ptr(projection));
GLuint vao;
glCreateVertexArrays(1, & vao);
glBindVertexArray(vao);
glCreateBuffers(1, & buffer);
glNamedBufferStorage(buffer, sizeof(GLfloat) * 6 * 4, NULL, GL_DYNAMIC_STORAGE_BIT);
glVertexArrayVertexBuffer(vao, 0, buffer, 0, sizeof(GLfloat) * 4);
glVertexArrayAttribFormat(vao, 0, 4, GL_FLOAT, GL_FALSE, 0);
glVertexArrayAttribBinding(vao, 0, 0);
glEnableVertexArrayAttrib(vao, 0);
glUniform3f(6, 0.88 f, 0.59 f, 0.07 f);
}
I have a lot of cout
s so that I can debug (I can't use the debugger, it always crashes or shows me nothing). When it gets a JSON to change the font, it always crashes after Creating textures...
. Can someone help me and point the mistake that I'm doing? Thank you very much!
PS: You can find the entire project here. It's pretty easy to build, although it has no readme (yet). You just need to open the project with Code::Blocks (all the dependencies are in the repository)
As you said in the comments, you use one thread for the rendering work and a separate thread for the socket.
An OpenGL context can be current on only one thread at a time, so by default OpenGL calls made from any other thread have no valid context and fail. To call GL from the socket thread you would first have to release the context on the rendering thread and make it current on the socket thread, synchronizing the two.
This is why you can't simply call initializeFreeType
in your socket thread, since it calls OpenGL function.
It may be possible to use multiple contexts and share data between them. It's also possible to use Vulkan, a lower-level graphics API that allows you to use many threads for loading and rendering.
The simplest change you can do would be to set a font to load the next frame.
Of course, you will need some kind of atomic variable or a lock.
std::atomic<char const*> FONT;
Your listen_for_connection
function should be like this:
FONT = c;
// initializeFreeType(); // Don't load immediately
cout << "Done!" << endl;
And your load function something like that:
void initializeFreeType() {
    // Atomically take ownership of any pending font path and reset the
    // signal in a single exchange; only the socket thread ever stores a
    // non-null value into FONT.
    char const * pending = FONT.exchange(nullptr);
    if (pending == nullptr) {
        return; // nothing was queued — do nothing this frame
    }
    // Load the font ...
}
The principle is this: when the FONT
variable is null, do nothing. If it has value, set it to null again and load the font with the value it had. Only the socket thread can set the FONT
value.
That way you can call initializeFreeType
each frame and only do something when necessary.
Of course, you can set a value at the beginning of the program (before starting the socket and before the first frame) so you have a font to begin with.