I found a problem in the following code:
It looks like GCC can only devirtualize the call through the first interface, I_Udc; it is not able to devirtualize the call through the second interface, I_Uac. If I list I_Uac first in the base class list, then it is the I_Uac call that gets inlined instead.
Here is the code at compiler explorer: https://godbolt.org/z/z6WEoznfW
class I_Uac
{
public:
    virtual float GetIUac() = 0;
};

class I_Udc
{
public:
    virtual float GetIUdc() = 0;
};

class DataAcq final : public I_Udc, public I_Uac
{
    float GetIUac()
    {
        return r;
    }
    float GetIUdc()
    {
        return m;
    }
private:
    float r = 20;
    float m = 20;
};

DataAcq temp;
I_Uac& temp1 = temp;
I_Udc& temp2 = temp;

int main()
{
    volatile float r = temp1.GetIUac();
    volatile float m = temp2.GetIUdc();
}
So the question is: how can I force the compiler to devirtualize and inline both function calls through the interface references?
The best (and only) way I know to guarantee devirtualization is to not virtualize in the first place. A slight change to your classes allows for a CRTP approach; it's a tad uglier, especially when referring to the base class types, but it's entirely doable.
#include <cstdio>

// CRTP "interface": the base is templated on the derived type and forwards
// the call statically, so there is no vtable and nothing left to devirtualize.
template <typename DerivedT>
class I_Uac {
public:
    float GetIUac() {
        return static_cast<DerivedT*>(this)->GetIUac();
    }
};

template <typename DerivedT>
class I_Udc {
public:
    float GetIUdc() {
        return static_cast<DerivedT*>(this)->GetIUdc();
    }
};

// DataAcq injects itself as the derived type into every base it inherits from.
template <template <typename...> class... BaseTs>
class DataAcq final : public BaseTs<DataAcq<BaseTs...>>... {
public:
    float GetIUac()
    {
        std::puts("GetIUac"); // much nicer in compiler explorer output
        return r;
    }
    float GetIUdc()
    {
        std::puts("GetIUdc");
        return m;
    }
private:
    float r = 20;
    float m = 20;
};

DataAcq<I_Udc, I_Uac> temp;
I_Udc<decltype(temp)>& temp1 = temp;
I_Uac<decltype(temp)>& temp2 = temp;

int main()
{
    volatile float r = temp1.GetIUdc();
    volatile float m = temp2.GetIUac();
}