feat: Change Makefile to build IA separately

This commit is contained in:
2024-12-02 16:35:59 +01:00
parent 01790ea885
commit a0c3305fd5
4 changed files with 45 additions and 33 deletions

View File

@ -9,9 +9,13 @@ RESOURCES_BUILD_DIR := $(BUILD_DIR)/resources
MAIN_SRCS := $(shell find $(SRC_DIRS) -name *.cpp -or -name *.c -or -name *.s | grep -v main_solver.c)
SOLVER_SRCS := ./src/main_solver.c ./src/utils/Solver/gridResolver.c
# IA files
IA_SRCS := ./src/network_main.c ./src/utils/AI/neural_utils.c ./src/utils/Application/ApplicationUtils.c ./src/utils/Image/ImageUtils.c
IA_OBJS := $(IA_SRCS:%=$(BUILD_DIR)/%.o)
MAIN_OBJS := $(MAIN_SRCS:%=$(BUILD_DIR)/%.o)
SOLVER_OBJS := $(SOLVER_SRCS:%=$(BUILD_DIR)/%.o)
DEPS := $(MAIN_OBJS:.o=.d) $(SOLVER_OBJS:.o=.d)
DEPS := $(MAIN_OBJS:.o=.d) $(SOLVER_OBJS:.o=.d) $(IA_OBJS:.o=.d)
INC_DIRS := $(shell find $(SRC_DIRS) -type d)
INC_FLAGS := $(addprefix -I,$(INC_DIRS))
@ -19,9 +23,9 @@ INC_FLAGS := $(addprefix -I,$(INC_DIRS))
CPPFLAGS := $(INC_FLAGS) -MMD -MP `sdl2-config --cflags` -Wall -Wextra -Werror -g -pedantic -std=c99
LDFLAGS := `sdl2-config --libs` -lSDL2_ttf -lSDL2_image -lm
# Default rule: build both main and solver
# Default rule: build all components
.PHONY: all
all: main solver
all: main solver IA
# Rule to copy resources
.PHONY: copy_resources
@ -43,6 +47,13 @@ solver: $(BUILD_DIR)/solver
$(BUILD_DIR)/solver: $(SOLVER_OBJS)
$(CC) $(SOLVER_OBJS) -o $@ $(LDFLAGS)
# Build IA (network_main.c + neural_utils.c)
.PHONY: IA
IA: $(BUILD_DIR)/IA
$(BUILD_DIR)/IA: $(IA_OBJS)
$(CC) $(IA_OBJS) -o $@ $(LDFLAGS)
# Build step for C source
$(BUILD_DIR)/%.c.o: %.c
mkdir -p $(dir $@)
@ -62,4 +73,4 @@ prepare:
make clean; bear -- make
# Include the .d makefiles. The - at the front suppresses the errors of missing Makefiles.
-include $(DEPS)
-include $(DEPS)

View File

@ -2,11 +2,11 @@
#include <stdlib.h>
#include <err.h>
#include <string.h>
#include "../Application/ApplicationUtils.h"
#include "../Image/ImageUtils.h"
#include "utils/Application/ApplicationUtils.h"
#include "utils/Image/ImageUtils.h"
#include "neural_utils.h"
void network_train(neural_network* network, char* training_data_dir, char* save_path, double batch_pourcent, size_t iteration, size_t warmup, size_t warmup_iteration, double learning_rate, size_t AdaFactor)
void network_train(neural_network *network, char *training_data_dir, char *save_path, double batch_pourcent, size_t iteration, size_t warmup, size_t warmup_iteration, double learning_rate, size_t AdaFactor)
{
network->nb_input = 169;
network->hidden_height = 200;
@ -17,7 +17,7 @@ void network_train(neural_network* network, char* training_data_dir, char* save_
size_t data_len;
training_data* training_datas = load_dataset(training_data_dir, AdaFactor, &data_len);
training_data *training_datas = load_dataset(training_data_dir, AdaFactor, &data_len);
size_t batch_size = (size_t)(data_len * batch_pourcent);
@ -30,7 +30,7 @@ void network_train(neural_network* network, char* training_data_dir, char* save_
save_neural_network(network, save_path);
}
void network_retrain(char* network_path, char* training_data_dir, double batch_pourcent, size_t iteration, double learning_rate)
void network_retrain(char *network_path, char *training_data_dir, double batch_pourcent, size_t iteration, double learning_rate)
{
neural_network network;
@ -38,7 +38,7 @@ void network_retrain(char* network_path, char* training_data_dir, double batch_p
size_t data_len;
training_data* datas = load_dataset(training_data_dir, network.nb_output / 26, &data_len);
training_data *datas = load_dataset(training_data_dir, network.nb_output / 26, &data_len);
size_t batch_size = (size_t)(data_len * batch_pourcent);
@ -47,7 +47,7 @@ void network_retrain(char* network_path, char* training_data_dir, double batch_p
save_neural_network(&network, network_path);
}
void network_use(neural_network* network, double* inputs)
void network_use(neural_network *network, double *inputs)
{
for (size_t i = 0; i < network->nb_input; i++)
{
@ -59,7 +59,7 @@ void network_use(neural_network* network, double* inputs)
printf("Predicted character: %c\n", get_network_char_prediction(network, network->nb_output / 26));
}
void network_test(char* network_path, char* data_path)
void network_test(char *network_path, char *data_path)
{
neural_network network;
@ -68,33 +68,33 @@ void network_test(char* network_path, char* data_path)
size_t AdaFactor = network.nb_output / 26;
size_t nb_data;
training_data* datas = load_dataset(data_path, AdaFactor, &nb_data);
training_data *datas = load_dataset(data_path, AdaFactor, &nb_data);
printf("Network total cost: %f\n", get_network_total_cost(&network, datas, nb_data));
size_t suc = get_network_success_rate(&network, datas, nb_data, AdaFactor);
printf("Network success rate: %li/%li: %.2f%%\n", suc, nb_data, (((double)suc / (double)nb_data) * 100.0));
}
void network_main(int argc, char* argv[])
int main(int argc, char *argv[])
{
(void)argc;
char* network_application_directory = path_get_directory(argv[0]);
char *network_application_directory = path_get_directory(argv[0]);
if (argc < 2)
errx(EXIT_FAILURE, "missing arguments, usage: ./network <train, retrain, use, test or help>");
char* action = argv[1];
char *action = argv[1];
neural_network network;
if (strcmp(action, "train") == 0) //train network: ./network train <network.csv> <data directory> <batch pourcent> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]
if (strcmp(action, "train") == 0) // train network: ./network train <network.csv> <data directory> <batch pourcent> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]
{
if (argc < 8)
errx(EXIT_FAILURE, "missing arguments, usage: ./network train <network.csv> <data directory> <batch pourcent> <iterations> <warmup> <warmup iterations> [learning_rate] [AdaFactor]");
char* network_path = combine_path(network_application_directory, argv[2]);
char* data_dir = combine_path(network_application_directory, argv[3]);
char *network_path = combine_path(network_application_directory, argv[2]);
char *data_dir = combine_path(network_application_directory, argv[3]);
double batch_pourcent = atof(argv[4]);
if (batch_pourcent > 1)
errx(EXIT_FAILURE, "invalid argument: <batch_pourcent> must be between 0 and 1");
@ -112,11 +112,11 @@ void network_main(int argc, char* argv[])
network_train(&network, data_dir, network_path, batch_pourcent, iterations, warmup, warmup_iterations, learning_rate, AdaFactor);
}
else if (strcmp(action, "retrain") == 0) //retrain network: ./network <network.csv> <data directory> <batch pourcent> <iterations>
else if (strcmp(action, "retrain") == 0) // retrain network: ./network <network.csv> <data directory> <batch pourcent> <iterations>
{
if (argc < 4)
errx(EXIT_FAILURE, "missing arguments, usage: ./network <network.csv> <data directory> <batch pourcent> <iterations> [learning_rate]");
double batch_pourcent = atof(argv[3]);
size_t iterations = (size_t)atoi(argv[4]);
@ -126,14 +126,14 @@ void network_main(int argc, char* argv[])
network_retrain(argv[1], argv[2], batch_pourcent, iterations, learning_rate);
}
else if (strcmp(action, "use") == 0) //use network: ./network use <network.csv> <image path>
else if (strcmp(action, "use") == 0) // use network: ./network use <network.csv> <image path>
{
if (argc < 3)
errx(EXIT_FAILURE, "missing arguments, usage: ./network use <network.csv> <image path>");
char* network_path = combine_path(network_application_directory, argv[2]);
char *network_path = combine_path(network_application_directory, argv[2]);
char* input_path = combine_path(network_application_directory, argv[3]);
char *input_path = combine_path(network_application_directory, argv[3]);
neural_network network;
@ -141,14 +141,14 @@ void network_main(int argc, char* argv[])
network_use(&network, image_to_bool_array(input_path, NULL));
}
else if (strcmp(action, "test") == 0) //test network: ./network test <network.csv> <data directory>
else if (strcmp(action, "test") == 0) // test network: ./network test <network.csv> <data directory>
{
if (argc < 3)
errx(EXIT_FAILURE, "missing arguments, usage: ./network test <network.csv> <data directory>");
char* network_path = combine_path(network_application_directory, argv[2]);
char *network_path = combine_path(network_application_directory, argv[2]);
char* data_path = combine_path(network_application_directory, argv[3]);
char *data_path = combine_path(network_application_directory, argv[3]);
network_test(network_path, data_path);
}
@ -160,6 +160,7 @@ void network_main(int argc, char* argv[])
{
errx(EXIT_FAILURE, "invalid arguments!");
}
return 1;
/*printf("Succes rate: %i\n", (int)(get_network_success_rate(&network, training_datas, data_len, 1) * 100));
@ -172,4 +173,4 @@ void network_main(int argc, char* argv[])
printf("Character: a, predicted character: %c\n", get_network_char_prediction(&network));
save_neural_network(&network, combine_path(network_application_directory, "../network.csv"));*/
}
}

View File

@ -1064,4 +1064,4 @@ void print_training_debug(neural_network* network, training_data* data, size_t d
process_network(&network);
printf("Character: a, predicted character: %c\n", get_network_char_prediction(&network));*/
printf("Character: a, predicted character: %c\n", get_network_char_prediction(&network));*/

View File

@ -34,7 +34,7 @@ typedef struct
double sigmoid(double x);
double double_rand();
double double_rand(double size);
void init_neuron(neuron* n, size_t nb_connection);
@ -74,4 +74,4 @@ void print_network_state(neural_network *network);
void print_training_debug(neural_network* network, training_data* data, size_t data_len);
#endif
#endif