Implement categorize_tokens
author	Lukas Jiriste <ljiriste@student.42prague.com>
Thu, 4 Jul 2024 11:27:40 +0000 (13:27 +0200)
committer	Lukas Jiriste <ljiriste@student.42prague.com>
Sun, 21 Jul 2024 18:21:20 +0000 (20:21 +0200)
categorize_tokens fills the tokens t_vec with the language's tokens.
The resulting vector lists the terminal tokens first, then the
end-of-file token, then the nonterminal tokens.
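
For context, a rough sketch of how a caller might consume that ordering.
Only categorize_tokens, ft_vec_caccess, ft_vec_free, ft_free_token, ft_printf
and the success/alloc_fail statuses appear in this commit; ft_vec_init and
the list_language_tokens wrapper are assumptions for illustration, not part
of the project API.

/*
** Sketch only: ft_vec_init and list_language_tokens are hypothetical.
** After a successful categorize_tokens call the vector is expected to
** hold terminals first, then the end-of-file token, then nonterminals.
*/
t_ft_stat	list_language_tokens(const t_vec *rules)
{
	t_vec			tokens;
	const t_token	*token;
	t_ft_stat		res;
	size_t			i;

	if (ft_vec_init(&tokens, sizeof(t_token)) != success)
		return (alloc_fail);
	res = categorize_tokens(&tokens, rules);
	if (res != success)
		return (res);
	i = 0;
	while (i < tokens.size)
	{
		token = ft_vec_caccess(&tokens, i);
		ft_printf("%s\n", token->type);
		++i;
	}
	ft_vec_free(&tokens, ft_free_token);
	return (success);
}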

ft_parse/ft_parsing_table_generate.c

index 6f398519d3c09e16bbcce55c2f2c725c92a97a92..5a61463891217cebc50485fa387e31be133c4d1d 100644
@@ -6,7 +6,7 @@
 /*   By: ljiriste <ljiriste@student.42prague.com>   +#+  +:+       +#+        */
 /*                                                +#+#+#+#+#+   +#+           */
 /*   Created: 2024/06/27 11:16:53 by ljiriste          #+#    #+#             */
-/*   Updated: 2024/07/04 12:29:18 by ljiriste         ###   ########.fr       */
+/*   Updated: 2024/07/04 13:24:49 by ljiriste         ###   ########.fr       */
 /*                                                                            */
 /* ************************************************************************** */
 
@@ -450,9 +450,106 @@ t_ft_stat construct_first_kernel(__attribute__((unused))t_vec *kernel, __attribu
        return (res);
 }
 
-t_ft_stat      categorize_tokens(__attribute__((unused))t_vec *tokens, __attribute__((unused))const t_vec *rules)
+t_ft_stat      append_token(t_vec *tokens, const t_token *token)
 {
-       ft_printf("categorize_tokens is not yet implemented\n");
+       t_ft_stat       res;
+       t_token         token_clone;
+
+       token_clone = ft_token_dup(token);
+       if (!token_clone.type)
+               return (alloc_fail);
+       res = ft_vec_append(tokens, &token_clone);
+       if (res != success)
+               ft_free_token(&token_clone);
+       return (res);
+}
+
+t_ft_stat      prepend_token(t_vec *tokens, const t_token *token)
+{
+       t_ft_stat       res;
+       t_token         token_clone;
+
+       token_clone = ft_token_dup(token);
+       if (!token_clone.type)
+               return (alloc_fail);
+       res = ft_vec_insert(tokens, &token_clone, 0);
+       if (res != success)
+               ft_free_token(&token_clone);
+       return (res);
+}
+
+int    token_in_results(const t_token *token, const t_vec *rules)
+{
+       size_t                                  i;
+       const t_grammar_rule    *rule;
+
+       i = 1;
+       while (i < rules->size)
+       {
+               rule = ft_vec_caccess(rules, i);
+               if (!ft_strcmp(token->type, rule->result.type))
+                       return (1);
+               ++i;
+       }
+       return (0);
+}
+
+t_ft_stat      add_constituents(t_vec *tokens, const t_vec *constituents, const t_vec *rules)
+{
+       t_ft_stat               res;
+       size_t                  i;
+       const t_token   *token;
+
+       i = 0;
+       while (i < constituents->size)
+       {
+               token = ft_vec_caccess(constituents, i);
+               if (ft_vec_contains(tokens, token, void_cmp_token_type))
+               {
+                       ++i;
+                       continue ;
+               }
+               if (token_in_results(token, rules))
+                       res = append_token(tokens, token);
+               else
+                       res = prepend_token(tokens, token);
+               if (res != success)
+                       return (res);
+               ++i;
+       }
+       return (success);
+}
+
+t_ft_stat      categorize_tokens(t_vec *tokens, const t_vec *rules)
+{
+       t_ft_stat                               res;
+       size_t                                  i;
+       const t_grammar_rule    *rule;
+
+       res = append_token(tokens, &eof_token);
+       if (res != success)
+               return (res);
+       i = 1;
+       while (i < rules->size)
+       {
+               rule = ft_vec_caccess(rules, i);
+               if (!ft_vec_contains(tokens, &rule->result, void_cmp_token_type))
+               {
+                       res = append_token(tokens, &rule->result);
+                       if (res != success)
+                       {
+                               ft_vec_free(tokens, ft_free_token);
+                               return (res);
+                       }
+               }
+               res = add_constituents(tokens, &rule->constituents, rules);
+               if (res != success)
+               {
+                       ft_vec_free(tokens, ft_free_token);
+                       return (res);
+               }
+               ++i;
+       }
        return (success);
 }