Calculate execution time of a sorting algorithm in C


I have a question about how to measure the runtime of only the sorting algorithm. I've searched all over the internet and found only superficial material, nothing that helps with what I need.

I have an assignment where I have to measure only the time the algorithm takes to sort an external file (in this case I'm using random numbers from 1 to 1000, with no repeats, in test.txt). The code runs, but whenever I test with a test.txt file containing fewer than 1,000 unsorted values, the reported execution time comes out as zero. When a classmate told the teacher she was testing with 10,000 values, he smiled and said to test with 10, 20, 50, 100, 150, 200... and so on. Is there any way to measure the time the algorithm takes to run that works for both small and large inputs?

#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <sys/time.h>

#define TAM 1000

void SelectionSort_1(int vetor[]);


int main (){        

    int iCont, vetor[TAM];

    FILE *ent;

    ent = fopen("teste.txt", "r");

    if(ent == NULL){
        printf("Erro! Nao consegui abrir o arquivo...\n");
        exit(1);
    }

    for(iCont = 0; iCont < TAM; iCont++){
        //printf("Lendo posicao %d\n", iCont);
        if(fscanf(ent, "%d", &vetor[iCont]) == EOF){
                setbuf(stdin, NULL);
            printf("Opa, tentei ler alem do final do arquivo...\n");
            break;
        }
    }

    SelectionSort_1(vetor);


    fclose(ent);

    printf("\n\nOrdenado: \n\n");
    for(iCont = 0; iCont < TAM; iCont++){
        printf("%d ", vetor[iCont]);
    }

    printf("\n\n\n\n");

    return 0;
}

void SelectionSort_1(int vetor[]){
    int iCont, jCont, min, aux;

    struct timeval tv1, tv2;
    gettimeofday(&tv1, NULL);   /* timestamp taken just before sorting */

    for(iCont = 0; iCont < TAM - 1; iCont++){
        min = iCont;
        for(jCont = iCont + 1; jCont < TAM; jCont++){
            if(vetor[jCont] < vetor[min])
                min = jCont;
        }
        if(min != iCont){       /* swap only if a smaller element was found */
            aux = vetor[iCont];
            vetor[iCont] = vetor[min];
            vetor[min] = aux;
        }
    }

    gettimeofday(&tv2, NULL);   /* timestamp taken just after sorting */
    printf("Total time = %.8f seconds\n",
           (double) (tv2.tv_usec - tv1.tv_usec) / 1000000 +
           (double) (tv2.tv_sec - tv1.tv_sec));
}
    
asked by anonymous 13.07.2018 / 04:57

1 answer


There is the clock() function in the time.h library; with it you get the number of processor clock ticks :)

It's easy to use: capture the clock right before the sort begins and again as soon as it finishes; to get the time between them, subtract the two clock values and divide by the constant CLOCKS_PER_SEC, much like this code:

/* clock example: frequency of primes */
#include <stdio.h>      /* printf */
#include <time.h>       /* clock_t, clock, CLOCKS_PER_SEC */
#include <math.h>       /* sqrt */

int frequency_of_primes (int n) {
  int i,j;
  int freq=n-1;
  for (i=2; i<=n; ++i) for (j=sqrt(i);j>1;--j) if (i%j==0) {--freq; break;}
  return freq;
}

int main ()
{
  clock_t t;
  int f;
  t = clock();
  printf ("Calculating...\n");
  f = frequency_of_primes (99999);
  printf ("The number of primes lower than 100,000 is: %d\n",f);
  t = clock() - t;
  printf ("It took me %d clicks (%f seconds).\n",t,((float)t)/CLOCKS_PER_SEC);
  return 0;
}
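
Applied to your program, the same idea looks like the sketch below. This is a minimal adaptation, not a definitive implementation: it assumes the gettimeofday lines were removed from SelectionSort_1, it fills the array with rand() as stand-in data (read teste.txt there instead, as in your code), and the repeat count REPS is a made-up value you should tune. Repeating the sort on fresh copies and averaging is what keeps the result from rounding down to zero on small inputs:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

#define TAM 1000
#define REPS 1000   /* hypothetical repeat count; tune for your machine */

void SelectionSort_1(int vetor[]);

int main(){
    int iCont, rep, vetor[TAM], copia[TAM];
    clock_t t;

    /* stand-in data; read teste.txt here instead, as in your program */
    for(iCont = 0; iCont < TAM; iCont++)
        vetor[iCont] = rand() % 1000 + 1;

    t = clock();                                /* clock before sorting */
    for(rep = 0; rep < REPS; rep++){
        memcpy(copia, vetor, sizeof copia);     /* fresh unsorted copy each run */
        SelectionSort_1(copia);
    }
    t = clock() - t;                            /* clock after sorting */

    /* total ticks -> seconds, then divide by REPS for the average of one sort */
    printf("Average time = %.8f seconds\n",
           ((double)t / CLOCKS_PER_SEC) / REPS);

    return 0;
}

void SelectionSort_1(int vetor[]){   /* the question's sort, timing code removed */
    int iCont, jCont, min, aux;
    for(iCont = 0; iCont < TAM - 1; iCont++){
        min = iCont;
        for(jCont = iCont + 1; jCont < TAM; jCont++)
            if(vetor[jCont] < vetor[min])
                min = jCont;
        if(min != iCont){
            aux = vetor[iCont];
            vetor[iCont] = vetor[min];
            vetor[min] = aux;
        }
    }
}

Note that clock() measures CPU time rather than wall-clock time, which is usually what you want when comparing sorting algorithms.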
    
15.07.2018 / 16:07