This repository was archived by the owner on May 22, 2025. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathfdefine.start.c
More file actions
112 lines (96 loc) · 3.48 KB
/
fdefine.start.c
File metadata and controls
112 lines (96 loc) · 3.48 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
/*
 * Read one logical line from stdin into a heap-allocated, NUL-terminated
 * string.  A newline preceded by a backslash does NOT terminate the line:
 * both the backslash and the newline are kept, allowing multi-line input.
 *
 * Returns a malloc'd string the caller must free(), or NULL on allocation
 * failure.  On EOF the input collected so far (possibly "") is returned,
 * so callers can safely strcmp() the result.
 */
char * collect_user_input(){
    size_t capacity = 100;
    char *buffer = malloc(capacity);
    if(!buffer){
        return NULL;
    }
    size_t i = 0;
    char last_char = '\0';
    while(true){
        /* int, not char: EOF (-1) must stay distinguishable from bytes */
        int c = getchar();
        if(c == EOF || (c == '\n' && last_char != '\\')){
            buffer[i] = '\0';
            break;
        }
        if(i >= capacity - 1){
            size_t new_capacity = capacity * 2;
            /* never overwrite buffer directly: realloc failure would
             * leak it and leave us writing through NULL */
            char *grown = realloc(buffer, new_capacity);
            if(!grown){
                free(buffer);
                return NULL;
            }
            buffer = grown;
            capacity = new_capacity;
        }
        buffer[i] = (char)c;
        last_char = (char)c;
        i++;
    }
    return buffer;
}
int start_action(){
ModelProps *props =collect_model_props();
if(!props){
return 1;
}
OpenAiInterface *openAi = openai.openai_interface.newOpenAiInterface(props->url, props->key, props->model);
Asset *sao_paulo_slangs = get_asset("sao_paulo_slangs.txt");
if(!sao_paulo_slangs){
printf("%sError: %s%s\n", RED, "No sao paulo slangs found", RESET);
return 1;
}
openai.openai_interface.add_system_prompt(openAi,(char*)sao_paulo_slangs->data);
Asset * main_system_rules = get_asset("system_instructions.json");
if(!main_system_rules){
printf("%sError: %s%s\n", RED, "No system instructions found", RESET);
return 1;
}
cJSON *rules = cJSON_Parse((char*)main_system_rules->data);
if(!rules){
printf("%sError: %s%s\n", RED, "No system instructions found", RESET);
return 1;
}
int size = cJSON_GetArraySize(rules);
for(int i = 0; i <size;i++){
cJSON *current_rule = cJSON_GetArrayItem(rules,i);
openai.openai_interface.add_system_prompt(openAi,cJSON_GetStringValue(current_rule));
}
char name_message[100];
snprintf(name_message,sizeof(name_message)-1,"your model base its %s",props->model);
openai.openai_interface.add_system_prompt(openAi,name_message);
configure_read_asset_callbacks(openAi,props->model);
configure_list_recursively_callbacks(openAi,props->model);
configure_read_file_callbacks(openAi,props->model);
configure_write_file_callbacks(openAi, props->model);
configure_execute_command_callbacks(openAi,props->model);
configure_remove_file_callbacks(openAi,props->model);
configure_terminate_callbacks(openAi,props->model);
printf("%sWelcome to the Ragcraft, runing: %s interface%s\n", BLUE,props->model , RESET);
while (true){
printf("%s >Your Message:%s", GREEN,PURPLE);
fflush(stdout);
char *message = collect_user_input();
if(strcmp(message,"exit") == 0){
break;
}
if(strcmp(message,"clear") == 0){
#ifdef _WIN32
system("cls");
#else
system("clear");
#endif
continue;
}
openai.openai_interface.add_user_prompt(openAi, message);
OpenAiResponse *response = OpenAiInterface_make_question_finish_reason_treated(openAi);
if(openai.openai_interface.error(response)){
printf("%sError: %s%s\n", RED, openai.openai_interface.get_error_message(response), RESET);
break;
}
const char *first_answer = openai.response.get_content_str(response,0);
if(first_answer == NULL){
printf("%sError: %s%s\n", RED, "No answer found", RESET);
free(message);
break;
}
printf("%s < %s: %s%s\n", BLUE,props->model, first_answer, RESET);
openai.openai_interface.add_response_to_history(openAi, response,0);
free(message);
}
printf("%sGoodbye%s\n", BLUE, RESET);
cJSON_Delete(rules);
openai.openai_interface.free(openAi);
freeModelProps(props);
return 0;
}