fix: make get_network_success_rate return a raw success count (size_t) and remove its dead inner loop
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing

This commit is contained in:
2024-12-02 14:08:24 +01:00
parent 04d66d0678
commit 5f8e806ab4
5 changed files with 65 additions and 22 deletions

View File

@ -3,6 +3,7 @@
#include <err.h>
#include <string.h>
#include "../Application/ApplicationUtils.h"
#include "../Image/ImageUtils.h"
#include "neural_utils.h"
void network_train(neural_network* network, char* training_data_dir, char* save_path, size_t iteration, size_t warmup, size_t warmup_iteration, double learning_rate, size_t AdaFactor)
@ -23,16 +24,32 @@ void network_train(neural_network* network, char* training_data_dir, char* save_
save_neural_network(network, save_path);
}
/* Run a single forward pass and print the predicted character.
 *
 * network: a loaded/initialized network (see load_neural_network).
 * inputs:  exactly network->nb_input activation values in [0,1] —
 *          presumably the flattened pixels from image_to_bool_array;
 *          the caller is responsible for validating the length.
 *
 * network->nb_output / 26 recovers the AdaFactor (26 outputs per factor,
 * one per letter), matching how the output layer is sized elsewhere. */
void network_use(neural_network* network, double* inputs)
{
    for (size_t i = 0; i < network->nb_input; i++)
    {
        network->inputs[i].activation = inputs[i];
    }
    process_network(network);
    printf("Predicted character: %c\n", get_network_char_prediction(network, network->nb_output / 26));
}
/* Evaluate a saved network against a labelled dataset: print the average
 * cost and the success rate (correct predictions / total samples).
 *
 * network_path: path to a saved network file (format of save_neural_network).
 * data_path:    directory of labelled data accepted by load_dataset.
 *
 * NOTE(review): datas and the network's internal allocations are not freed
 * here — tolerable in a short-lived CLI, but a free_* helper would be better. */
void network_test(char* network_path, char* data_path)
{
    neural_network network;
    load_neural_network(&network, read_file(network_path));
    /* Recover AdaFactor from the output-layer width: 26 outputs per factor. */
    size_t AdaFactor = network.nb_output / 26;
    size_t nb_data;
    training_data* datas = load_dataset(data_path, AdaFactor, &nb_data);
    /* Label matches the function actually called (average, not total, cost). */
    printf("Network average cost: %f\n", get_network_average_cost(&network, datas, nb_data));
    size_t suc = get_network_success_rate(&network, datas, nb_data, AdaFactor);
    if (nb_data == 0)
    {
        /* Avoid 0/0 in the percentage below when the dataset is empty. */
        printf("Network success rate: no test data found\n");
    }
    else
    {
        /* %zu is the portable specifier for size_t; %li is undefined
           behavior wherever size_t is not long (e.g. 64-bit Windows). */
        printf("Network success rate: %zu/%zu: %.2f%%\n", suc, nb_data, ((double)suc / (double)nb_data) * 100.0);
    }
}
void network_main(int argc, char* argv[])
@ -41,7 +58,7 @@ void network_main(int argc, char* argv[])
char* network_application_directory = path_get_directory(argv[0]);
if (argc < 2)
errx(EXIT_FAILURE, "missing arguments, usage: ./network <train or use>");
errx(EXIT_FAILURE, "missing arguments, usage: ./network <train, use, test or help>");
char* action = argv[1];
@ -69,28 +86,47 @@ void network_main(int argc, char* argv[])
network_train(&network, data_dir, network_path, iterations, warmup, warmup_iterations, learning_rate, AdaFactor);
}
else if (strcmp(action, "use") == 0) //use network: ./network use <network.csv> input1,input2,...,inputx
else if (strcmp(action, "use") == 0) //use network: ./network use <network.csv> <image>
{
if (argc < 3)
errx(EXIT_FAILURE, "missing arguments, usage: ./network use <network.csv> input1,input2,...,inputx");
errx(EXIT_FAILURE, "missing arguments, usage: ./network use <network.csv> <image>");
char* network_path = combine_path(network_application_directory, argv[2]);
char* input_str = argv[3];
char* input_path = combine_path(network_application_directory, argv[3]);
size_t nb_input = 0;
size_t len = 0;
char** input_array = string_split(input_str, ',', &nb_input);
double* input_array = image_to_bool_array(input_path, &len);
neural_network network;
load_neural_network(&network, read_file(network_path));
if (nb_input != network.nb_input)
if (len != network.nb_input)
errx(EXIT_FAILURE, "inputs are not valid");
network_use(&network, input_array);
}
else if (strcmp(action, "test") == 0) //test network: ./network test <network.csv> <data directory>
{
if (argc < 3)
errx(EXIT_FAILURE, "missing arguments, usage: ./network test <network.csv> <data directory>");
char* network_path = combine_path(network_application_directory, argv[2]);
char* data_path = combine_path(network_application_directory, argv[3]);
network_test(network_path, data_path);
}
else if (strcmp(action, "help") == 0)
{
printf("Network usage:\ntrain network: ./network train <network.csv> <data directory> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]\nuse network: ./network use <network.csv> <image>\ntest network: ./network test <network.csv> <data directory>");
}
else
{
errx(EXIT_FAILURE, "invalid arguments!");
}
/*printf("Succes rate: %i\n", (int)(get_network_success_rate(&network, training_datas, data_len, 1) * 100));

View File

@ -553,6 +553,11 @@ double get_network_total_cost(neural_network* network, training_data* datas, siz
return cost;
}
/* Mean cost of the network over a dataset.
 *
 * network:  network to evaluate.
 * datas:    array of data_len labelled samples.
 * data_len: number of samples; an empty dataset yields 0.0 instead of
 *           performing a 0/0 division (which would produce NaN).
 *
 * Returns get_network_total_cost(...) / data_len. */
double get_network_average_cost(neural_network* network, training_data* datas, size_t data_len)
{
    if (data_len == 0)
        return 0.0;
    return get_network_total_cost(network, datas, data_len) / (double)data_len;
}
char get_data_char_prediction(training_data data, size_t nb_output)
{
char res = '\0';
@ -672,9 +677,9 @@ void train_network(neural_network* network, training_data* datas, size_t data_le
}
}
double get_network_success_rate(neural_network* network, training_data* datas, size_t data_len, size_t AdaFactor)
size_t get_network_success_rate(neural_network* network, training_data* datas, size_t data_len, size_t AdaFactor)
{
int success = 0;
size_t success = 0;
for (size_t i = 0; i < data_len; i++)
{
@ -682,16 +687,13 @@ double get_network_success_rate(neural_network* network, training_data* datas, s
process_network(network);
for (size_t o = 0; o < data_len; o++)
{
if (get_network_char_prediction(network, AdaFactor) != get_data_char_prediction(datas[i], network->nb_output))
continue;
}
if (get_network_char_prediction(network, AdaFactor) != get_data_char_prediction(datas[i], network->nb_output))
continue;
success++;
}
return (double)success / (double)data_len;
return success;
}
training_data* load_dataset(const char* directory, size_t AdaFactor, size_t* nb_data)
@ -764,7 +766,7 @@ training_data* load_dataset(const char* directory, size_t AdaFactor, size_t* nb_
char letter = dir->d_name[0];
char letter_num = tolower(letter) - 'a';
datas[data_id].inputs = image_to_bool_array(file);
datas[data_id].inputs = image_to_bool_array(file, NULL);
double* out = calloc(26*AdaFactor, sizeof(double));

View File

@ -58,9 +58,11 @@ void network_process_epoche(neural_network* network, training_data* data, size_t
double get_network_total_cost(neural_network* network, training_data* datas, size_t data_len);
double get_network_average_cost(neural_network* network, training_data* datas, size_t data_len);
void train_network(neural_network* network, training_data* datas, size_t data_len, float learning_rate, size_t warmup, size_t warmup_iterations, size_t iterations);
double get_network_success_rate(neural_network* network, training_data* datas, size_t data_len, size_t AdaFactor);
size_t get_network_success_rate(neural_network* network, training_data* datas, size_t data_len, size_t AdaFactor);
training_data* load_dataset(const char* directory, size_t AdaFactor, size_t* nb_data);

View File

@ -262,7 +262,7 @@ SDL_Surface* rotate_pixels(SDL_Surface* surface, double angle) {
return rotated_surface;
}
double* image_to_bool_array(const char* file)
double* image_to_bool_array(const char* file, size_t* image_len)
{
SDL_Surface* surface = IMG_Load(file);
@ -271,6 +271,9 @@ double* image_to_bool_array(const char* file)
double* res = calloc(surface->w * surface->h , sizeof(double));
if (image_len != NULL)
*image_len = surface->w * surface->h;
for (int x = 0; x < surface->w; x++)
{
for (int y = 0; y < surface->h; y++)

View File

@ -36,7 +36,7 @@ SDL_Surface* create_sub_surface(SDL_Surface* surface, SDL_Rect rect);
SDL_Surface* rotate_pixels(SDL_Surface* surface, double angle);
double* image_to_bool_array(const char* file);
double* image_to_bool_array(const char* file, size_t* image_len);
SDL_Surface* resize_image(SDL_Surface* source, int new_width, int new_height);