feat: fixed problems and display
All checks were successful
continuous-integration/drone/push Build is passing
@@ -9,8 +9,8 @@
 void network_train(neural_network* network, char* training_data_dir, char* save_path, double batch_pourcent, size_t iteration, size_t warmup, size_t warmup_iteration, double learning_rate, size_t AdaFactor)
 {
     network->nb_input = 169;
-    network->hidden_height = 30;
-    network->hidden_depth = 2;
+    network->hidden_height = 200;
+    network->hidden_depth = 1;
     network->nb_output = 26 * AdaFactor;

     init_neural_network(network);
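Note: the hunk above replaces two hidden layers of 30 neurons with a single hidden layer of 200 neurons. A rough comparison of the resulting parameter counts, assuming a fully connected network with one bias per neuron and AdaFactor = 1 (the layer sizes come from the hunk; dense_param_count and the whole snippet are illustrative only, not part of the commit):

#include <stdio.h>

/* Count weights + biases of a fully connected stack; illustrative only. */
static size_t dense_param_count(const size_t* layers, size_t n)
{
    size_t total = 0;
    for (size_t i = 1; i < n; i++)
        total += layers[i - 1] * layers[i] + layers[i];   /* weights + biases */
    return total;
}

int main(void)
{
    size_t old_topology[] = { 169, 30, 30, 26 };   /* 169 inputs, 2 hidden layers of 30, 26 outputs */
    size_t new_topology[] = { 169, 200, 26 };      /* 169 inputs, 1 hidden layer of 200, 26 outputs */
    printf("old: %zu parameters\n", dense_param_count(old_topology, 4));   /* 6836 */
    printf("new: %zu parameters\n", dense_param_count(new_topology, 3));   /* 39226 */
    return 0;
}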
@@ -24,11 +24,29 @@ void network_train(neural_network* network, char* training_data_dir, char* save_path, double batch_pourcent, size_t iteration, size_t warmup, size_t warmup_iteration, double learning_rate, size_t AdaFactor)
     train_network(network, training_datas, data_len, learning_rate, batch_size, warmup, warmup_iteration, iteration);

     printf("Final network cost: %f\n", get_network_total_cost(network, training_datas, data_len));
-    printf("Final success rate: %i\n", (int)(get_network_success_rate(network, training_datas, data_len, AdaFactor) * 100.0));
+    size_t suc = get_network_success_rate(network, training_datas, data_len, AdaFactor);
+    printf("Network success rate: %li/%li: %.2f%%\n", suc, data_len, (((double)suc / (double)data_len) * 100.0));

     save_neural_network(network, save_path);
 }

+void network_retrain(char* network_path, char* training_data_dir, double batch_pourcent, size_t iteration, double learning_rate)
+{
+    neural_network network;
+
+    load_neural_network(&network, read_file(network_path));
+
+    size_t data_len;
+
+    training_data* datas = load_dataset(training_data_dir, network.nb_output / 26, &data_len);
+
+    size_t batch_size = (size_t)(data_len * batch_pourcent);
+
+    train_network(&network, datas, data_len, learning_rate, batch_size, 0, 0, iteration);
+
+    save_neural_network(&network, network_path);
+}
+
 void network_use(neural_network* network, double* inputs)
 {
     for (size_t i = 0; i < network->nb_input; i++)
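Two details in the hunk above are easy to miss. First, network_retrain recovers AdaFactor from the saved network as network.nb_output / 26, matching the nb_output = 26 * AdaFactor assignment in network_train. Second, the new success-rate line casts both counters to double before dividing; with size_t operands the division would truncate before the multiplication by 100. A minimal sketch of the difference, with hypothetical counts not taken from the commit:

#include <stdio.h>

int main(void)
{
    size_t suc = 231, data_len = 500;                          /* hypothetical counts */
    double wrong = (double)(suc / data_len) * 100.0;           /* integer division first: 0.00 */
    double right = ((double)suc / (double)data_len) * 100.0;   /* 46.20 */
    printf("%zu/%zu: wrong=%.2f%% right=%.2f%%\n", suc, data_len, wrong, right);
    return 0;
}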
@@ -52,7 +70,7 @@ void network_test(char* network_path, char* data_path)
     size_t nb_data;
     training_data* datas = load_dataset(data_path, AdaFactor, &nb_data);

-    printf("Network total cost: %f\n", get_network_average_cost(&network, datas, nb_data));
+    printf("Network total cost: %f\n", get_network_total_cost(&network, datas, nb_data));
     size_t suc = get_network_success_rate(&network, datas, nb_data, AdaFactor);
     printf("Network success rate: %li/%li: %.2f%%\n", suc, nb_data, (((double)suc / (double)nb_data) * 100.0));
 }
@@ -63,7 +81,7 @@ void network_main(int argc, char* argv[])
     char* network_application_directory = path_get_directory(argv[0]);

     if (argc < 2)
-        errx(EXIT_FAILURE, "missing arguments, usage: ./network <train, use, test or help>");
+        errx(EXIT_FAILURE, "missing arguments, usage: ./network <train, retrain, use, test or help>");

     char* action = argv[1];

@@ -94,7 +112,20 @@ void network_main(int argc, char* argv[])

         network_train(&network, data_dir, network_path, batch_pourcent, iterations, warmup, warmup_iterations, learning_rate, AdaFactor);
     }
+    else if (strcmp(action, "retrain") == 0) //retrain network: ./network <network.csv> <data directory> <batch pourcent> <iterations>
+    {
+        if (argc < 4)
+            errx(EXIT_FAILURE, "missing arguments, usage: ./network <network.csv> <data directory> <batch pourcent> <iterations> [learning_rate]");
+
+        double batch_pourcent = atof(argv[3]);
+        size_t iterations = (size_t)atoi(argv[4]);
+
+        double learning_rate = 0.1;
+        if (argc > 4)
+            learning_rate = atof(argv[5]);
+
+        network_retrain(argv[1], argv[2], batch_pourcent, iterations, learning_rate);
+    }
     else if (strcmp(action, "use") == 0) //use network: ./network use <network.csv> <image path>
     {
         if (argc < 3)
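The new retrain branch parses its numeric arguments with atof/atoi, mirroring the existing train branch. As a hedged alternative, not part of the commit, strtod/strtoul report conversion failures instead of silently returning 0, which makes a bad <batch pourcent> or <iterations> argument easier to diagnose; parse_double_arg and parse_size_arg below are hypothetical helper names:

#include <err.h>
#include <errno.h>
#include <stdlib.h>

/* Illustrative helpers only; the commit itself uses atof()/atoi(). */
static double parse_double_arg(const char* s, const char* name)
{
    char* end;
    errno = 0;
    double v = strtod(s, &end);
    if (errno != 0 || end == s || *end != '\0')
        errx(EXIT_FAILURE, "invalid %s: '%s'", name, s);
    return v;
}

static size_t parse_size_arg(const char* s, const char* name)
{
    char* end;
    errno = 0;
    unsigned long v = strtoul(s, &end, 10);
    if (errno != 0 || end == s || *end != '\0')
        errx(EXIT_FAILURE, "invalid %s: '%s'", name, s);
    return (size_t)v;
}

/* e.g. double batch_pourcent = parse_double_arg(argv[3], "batch pourcent"); */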
@@ -108,7 +139,7 @@ void network_main(int argc, char* argv[])

         load_neural_network(&network, read_file(network_path));

-        network_use(&network, image_to_bool_array(input_path));
+        network_use(&network, image_to_bool_array(input_path, NULL));
     }
     else if (strcmp(action, "test") == 0) //test network: ./network test <network.csv> <data directory>
     {
@@ -123,7 +154,7 @@ void network_main(int argc, char* argv[])
     }
     else if (strcmp(action, "help") == 0)
     {
-        printf("Network usage:\ntrain network: ./network train <network.csv> <data directory> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]\nuse network: ./network use <network.csv> <image>\ntest network: ./network test <network.csv> <data directory>");
+        printf("Network usage:\ntrain network: ./network train <network.csv> <data directory> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]\nuse network: ./network use <network.csv> <image>\ntest network: ./network test <network.csv> <data directory>\n");
     }
     else
     {
@@ -706,11 +706,6 @@ double get_network_total_cost(neural_network* network, training_data* datas, size_t data_len)
     return cost / (double)data_len;
 }

-double get_network_average_cost(neural_network* network, training_data* datas, size_t data_len)
-{
-    return get_network_total_cost(network, datas, data_len) / (double) data_len;
-}
-
 char get_data_char_prediction(training_data data, size_t nb_output)
 {
     char res = '\0';
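As the first context line of the hunk shows, get_network_total_cost already divides the accumulated cost by data_len, so the deleted get_network_average_cost wrapper divided by data_len twice; the test command now calls get_network_total_cost directly (see the earlier hunk). A small numeric illustration of the removed double division, with hypothetical values:

#include <stdio.h>

int main(void)
{
    double cost = 50.0;                      /* hypothetical accumulated cost over 100 samples */
    double data_len = 100.0;
    double total = cost / data_len;          /* what get_network_total_cost returns: 0.5 */
    double removed_avg = total / data_len;   /* what the deleted wrapper returned: 0.005 */
    printf("per-sample cost: %f, double-divided: %f\n", total, removed_avg);
    return 0;
}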
@@ -56,8 +56,6 @@ void network_process_epoche(neural_network* network, neural_network* memory_netw

 double get_network_total_cost(neural_network* network, training_data* datas, size_t data_len);

-double get_network_average_cost(neural_network* network, training_data* datas, size_t data_len);
-
 void train_network(neural_network* network, training_data* datas, size_t data_len, float learning_rate, size_t batch_size, size_t warmup, size_t warmup_iterations, size_t iterations);

 size_t get_network_success_rate(neural_network* network, training_data* datas, size_t data_len, size_t AdaFactor);