问题描述:

Hi so I am creating a program that uses a hash to store words and their number of occurrences from a text file. That works as intended. The issue I am having comes from freeing the allocated memory.

Here is my hash

#include<stdio.h>

#include<stdlib.h>

#include<string.h>

#include<ctype.h>

#include"hash.h"

/*

struct listnode{

char * word;

int count;

};

*/

void hashCreate(struct listnode * hashTable[], int size){

int i;

for(i=0;i<size;i++){

hashTable[i]=(struct listnode *)malloc(sizeof(struct listnode));

hashTable[i]->count=0;

}

}

int hash(char * data, int size) {

unsigned long hash = 5381;

char * p;

for (p = data; *p != '\0'; ++p) {

hash = (hash * 33) + *p;

}

return (int)(hash % size);

}

void hashAdd(char * data, struct listnode * hashTable[], int size){

int key=hash(data, size);

hashTable[key]->word=strdup(data);

hashTable[key]->count+=1;

}

void hashPrint(struct listnode * hashTable[], int size){

int i;

for(i=0;i<size;i++){

if(hashTable[i]->count!=0)

printf("%s: %d \n",hashTable[i]->word,hashTable[i]->count);

}

}

void hashDelete(struct listnode * hashTable[],int size){

int i;

for(i=0;i<size;i++){

free(hashTable[i]->word);

free(hashTable[i]);

}

}

This is what uses it

#include<stdio.h>

#include<stdlib.h>

#include<string.h>

#include<ctype.h>

#include"hash.h"

/*

int hash(char * data, int size) {

unsigned long hash = 5381;

char * p;

for (p = data; *p != '\0'; ++p) {

hash = (hash * 33) + *p;

}

return (int)(hash % size);

}

*/

#define SIZE 1500

void removePunct(char * str);

void fileRead(char * filename);

struct listnode * hashTable[1500];

int main(int argc, char ** argv){

int i;

if(argc<2)

fprintf(stderr,"Enter filename \n");

hashCreate(hashTable, SIZE);

for(i=1; i<argc; i++){

fileRead(argv[i]);

}

hashPrint(hashTable,SIZE);

hashDelete(hashTable, SIZE);

return 0;

}

void fileRead(char * filename){

FILE * file = fopen(filename,"r");

char word[80];

if(!file){

fprintf(stderr,"Error opening file \n");

return;

}

while(fscanf(file, "%s", word)==1){

removePunct(word);

hashAdd(word,hashTable,SIZE);

}

fclose(file);

}

void removePunct(char * str){

int i,p=0;

for(i=0; i<strlen(str);i++){

if(isalpha(str[i]) || str[i]==' '){

str[p]=tolower(str[i]);

p++;

}

}

str[p]='\0';

}

In my hashDelete function the strings are not being freed which is causing a memory leak. I tested it by freeing the string within the hashAdd function and there were no memory leaks but there were also no strings being printed. Im unable to find the issue that is not letting me free all my memory. Any help would be appreciated.

网友答案:

In this code

void hashAdd(char * data, struct listnode * hashTable[], int size){
    int key=hash(data, size);
    hashTable[key]->word=strdup(data);
    hashTable[key]->count+=1;
}

you use strdup to get a new string (malloc'ed by strdup). If you already have done that once for a given key you'll leak memory.

So you need a check like:

if (hashTable[key]->word == NULL) hashTable[key]->word=strdup(data);

However, that requires that you initialize wordto NULL when creating the table.

Of topic: Normally, you will however need to handle identical key values with some extra code. The data value resulting in that key may or may not be the same as the word already stored. Something you should check. If they are identical, you can increment count. If they differ, you'll have to have a method for storing two different words with identical key values.

It could look something like:

void hashAdd(char * data, struct listnode * hashTable[], int size){
    int key=hash(data, size);
    if (hashTable[key]->word == NULL) {
        // First time with this key
        hashTable[key]->word=strdup(data);
        hashTable[key]->count+=1;
     } else {
         // Key already used once
         if (strcmp(data, hashTable[key]->word) == 0) {
             // Same word
             hashTable[key]->count+=1;
         } else {
             // Different word
             // ...
             // Add code for storing this word in another location
             // ...
         }
     }
}
相关阅读:
Top