#include <bitset>
#include <iomanip>
#include <iostream>
    
    std::bitset<128> 
    starch_32 (const std::bitset<32> &input)
    {
    	std::bitset<128> output;
    	
        for (size_t i = 0; i < input.size(); ++i) {
            // If `input[N]` is `true`, set `output[N*4, N*4+4]` to true.
            if (input.test (i)) {
                const size_t output_index = i * 4;
            
                output.set (output_index);
                output.set (output_index + 1);
                output.set (output_index + 2);
                output.set (output_index + 3);
            }
        }
        
        return output;
    }
    
    // Example with 0xC. 
    int main() {
        std::bitset<32> input{0xcf00};
    
        auto result = starch_32 (input);
        
        std::cout << std::hex << result.to_ullong() << "\n";
    }