I have a question about how to measure the runtime of just the sorting algorithm. I searched a lot on the internet but found only superficial material — nothing that helps with what I need.
I have an assignment where I must measure the time the algorithm takes only to sort data read from an external file (in this case I'm using random numbers from 1 to 1000, with no repeats, in test.txt). The code runs, but every time I test with a test.txt file containing fewer than 1000 unsorted values, the reported execution time is zero. When a classmate told the teacher she was testing with 10,000 values, he smiled and suggested testing with 10, 20, 50, 100, 150, 200... and so on. Is there any way to measure the time the algorithm takes to run so that it works for both small and large inputs?
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <sys/time.h>
#define TAM 1000
void SelectionSort_1();
int main (){
int iCont, jCont, aux = 0, vetor[TAM];
FILE *ent;
ent = fopen("teste.txt", "r");
if(ent == NULL){
printf("Erro! Nao consegui abrir o arquivo...\n");
exit(1);
}
for(iCont = 0; iCont < TAM; iCont++){
//printf("Lendo posicao %d\n", iCont);
if(fscanf(ent, "%d", &vetor[iCont]) == EOF){
setbuf(stdin, NULL);
printf("Opa, tentei ler alem do final do arquivo...\n");
break;
}
}
SelectionSort_1(vetor);
fclose( ent );
printf("\n\nOrdenado: \n\n");
for(iCont = 0; iCont < TAM; iCont++){
printf("%d ", vetor[iCont]);
}
printf("\n\n\n\n");
return 0;
}
#ifndef TAM            /* defined at the top of this file; guard keeps the */
#define TAM 1000       /* function compilable in isolation */
#endif

/*
 * Sorts the first TAM elements of vetor[] in ascending order with
 * selection sort, and prints the elapsed wall-clock time of the sort.
 *
 * Timing fix: gettimeofday() resolves only microseconds, so small arrays
 * reported 0.00000000 seconds.  timespec_get() (standard C11, <time.h>)
 * exposes a nanosecond field, so even tiny inputs produce a nonzero
 * reading.  If an input is still too fast to measure reliably, run the
 * sort in a loop K times on copies of the data and divide the total by K.
 */
void SelectionSort_1(int vetor[]){
    int iCont, jCont, min, aux;
    struct timespec t1, t2;

    timespec_get(&t1, TIME_UTC);

    for(iCont = 0; iCont < TAM - 1; iCont++){
        /* find the index of the smallest remaining element */
        min = iCont;
        for(jCont = iCont + 1; jCont < TAM; jCont++){
            if(vetor[jCont] < vetor[min])
                min = jCont;
        }
        if(min != iCont){   /* swap only when something actually moves */
            aux = vetor[iCont];
            vetor[iCont] = vetor[min];
            vetor[min] = aux;
        }
    }

    timespec_get(&t2, TIME_UTC);

    printf("Total time = %.9f seconds\n",
           (double)(t2.tv_sec - t1.tv_sec) +
           (double)(t2.tv_nsec - t1.tv_nsec) / 1e9);
}