Why does this program, which takes a number in decimal notation, converts it to binary notation, and prints it in the correct order (from the most significant to the least significant bit), use malloc twice?
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
int main(int argc, char *argv[]) {
    // Stack
    int decimal, q, r;
    int counter, i;
    char *binary = NULL;
    char *aux;

    printf("Enter a number in decimal: ");
    scanf("%d", &decimal);

    counter = 1;
    while (decimal >= 2) {
        q = decimal / 2;
        r = decimal - (q * 2);

        // Heap: grow the buffer by one byte and copy the digits already stored
        aux = (char *) malloc(counter * sizeof(char));
        if (binary != NULL) {
            memcpy(aux, binary, counter - 1);
            free(binary);
        }
        binary = aux;

        // Store the remainder as the next digit (least significant first)
        if (r == 0) {
            binary[counter - 1] = '0';
        } else {
            binary[counter - 1] = '1';
        }
        //printf("remainder %d = %d\n", counter, r);

        counter++;
        decimal = q;
    }
    //printf("last quotient = %d\n", q);

    // Heap: one more byte for the final quotient (0 or 1)
    aux = (char *) malloc(counter * sizeof(char));
    if (binary != NULL) {
        memcpy(aux, binary, counter - 1);
        free(binary);
    }
    binary = aux;
    if (decimal == 0) {
        binary[counter - 1] = '0';
    } else {
        binary[counter - 1] = '1';
    }

    printf("Result in binary = ");
    // Digits were stored least significant first, so print them in reverse
    for (i = counter - 1; i >= 0; i--) {
        printf("%c", binary[i]);
    }
    printf("\n");

    free(binary);
    return 0;
}
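
For comparison, here is a minimal sketch of the same repeated-division idea where the buffer is grown with realloc instead of the malloc/memcpy/free sequence. This is only an illustration I am including for reference, not the original program; the variable names and structure are my own.

#include <stdlib.h>
#include <stdio.h>

int main(void) {
    int decimal, count = 0;
    char *binary = NULL;

    printf("Enter a number in decimal: ");
    if (scanf("%d", &decimal) != 1 || decimal < 0) {
        return 1;
    }

    // Produce at least one digit, least significant first,
    // growing the buffer by one byte per digit with realloc
    // (realloc(NULL, n) behaves like malloc(n) on the first pass).
    do {
        char *tmp = realloc(binary, count + 1);
        if (tmp == NULL) {
            free(binary);
            return 1;
        }
        binary = tmp;
        binary[count++] = (char)('0' + (decimal % 2));
        decimal /= 2;
    } while (decimal > 0);

    // Digits were stored in reverse order, so print from the end.
    printf("Result in binary = ");
    for (int i = count - 1; i >= 0; i--) {
        putchar(binary[i]);
    }
    putchar('\n');

    free(binary);
    return 0;
}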