/* By: ljiriste <ljiriste@student.42prague.com> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/06/27 11:16:53 by ljiriste #+# #+# */
/*   Updated: 2024/07/04 10:21:29 by ljiriste         ###   ########.fr       */
/* */
/* ************************************************************************** */
#include "ft_parse.h"
#include <stdlib.h>
+static const t_token eof_token = {.type = "$", .str = NULL};
+static const t_token empty_token = {.type = "''", .str = NULL};
+
void free_item(t_lr1_item *item)
{
ft_vec_free(&item->lookahead, ft_free_token);
int cmp_token_type(const t_token *token1, const t_token *token2)
{
+ if (!token1 && !token2)
+ return(0);
+ else if (!token1 || !token2)
+ return(1);
return (ft_strcmp(token1->type, token2->type));
}
return (states->size);
}
-t_ft_stat add_prediction(__attribute__((unused))t_vec *closure, __attribute__((unused))const t_lr1_item *item, __attribute__((unused))const t_vec *rules)
+const t_token *get_next_token(const t_marked_grammar_rule *rule)
{
- ft_printf("add_prediction is not yet implemented\n");
+ return (ft_vec_caccess(&rule->rule->constituents, rule->position + 1));
+}
+
+int is_terminal_token(const t_token *token, const t_vec *tokens)
+{
+ size_t i;
+ const t_token *table_token;
+
+ i = 0;
+ while ((i == 0 || cmp_token_type(table_token, &eof_token)) && i < tokens->size)
+ {
+ table_token = ft_vec_caccess(tokens, i);
+ if (!cmp_token_type(table_token, token))
+ return (1);
+ ++i;
+ }
+ return (0);
+}
+
+t_ft_stat expand_lookahead(t_vec *lookahead, const t_marked_grammar_rule *rule, const t_vec *rules, const t_vec *tokens);
+
+t_ft_stat add_first(t_vec *lookahead, const t_token *token, const t_vec *rules, const t_vec *tokens)
+{
+ t_ft_stat res;
+ size_t i;
+ t_marked_grammar_rule rule;
+ t_token token_copy;
+
+ if (is_terminal_token(token, tokens))
+ {
+ token_copy = ft_token_dup(*token);
+ res = ft_vec_setinsert(lookahead, token, void_cmp_token_type);
+ if (res != success)
+ ft_free_token(&token_copy);
+ return (res);
+ }
+ rule.position = 0;
+ i = 0;
+ while (i < rules->size)
+ {
+ rule.rule = ft_vec_caccess(rules, i);
+ if (!cmp_token_type(token, &rule.rule->result))
+ {
+ res = expand_lookahead(lookahead, &rule, rules, tokens);
+ if (res != success)
+ return (res);
+ }
+ ++i;
+ }
+ return (success);
+}
+
+t_ft_stat expand_lookahead(t_vec *lookahead, const t_marked_grammar_rule *rule, const t_vec *rules, const t_vec *tokens)
+{
+ size_t i;
+ t_ft_stat res;
+
+ i = rule->position + 1;
+ while ((i == rule->position + 1 || ft_vec_contains(lookahead, &empty_token, void_cmp_token_type)) && i < rule->rule->constituents.size)
+ {
+ res = add_first(lookahead, ft_vec_caccess(&rule->rule->constituents, i), rules, tokens);
+ if (res != success)
+ return (res);
+ ++i;
+ }
+ return (success);
+}
+
+t_ft_stat add_to_lookahead(const t_vec *lookahead, t_vec *new_lookahead)
+{
+ t_ft_stat res;
+ size_t i;
+ t_token token;
+
+ i = 0;
+ while (i < lookahead->size)
+ {
+ token = ft_token_dup(*(const t_token *)ft_vec_caccess(lookahead, i));
+ res = ft_vec_setinsert(new_lookahead, &token, void_cmp_token_type);
+ if (res != success)
+ ft_free_token(&token);
+ if (res != success && res != already_inside)
+ return (res);
+ ++i;
+ }
+ return (success);
+}
+
+t_ft_stat add_lookahead(t_lr1_item *new, const t_lr1_item *item, const t_vec *rules, const t_vec *tokens)
+{
+ t_ft_stat res;
+
+ res = ft_vec_init(&new->lookahead, sizeof(t_token));
+ if (res != success)
+ return (res);
+ res = expand_lookahead(&new->lookahead, &item->core, rules, tokens);
+ if (res != success)
+ return (res);
+ if (ft_vec_contains(&new->lookahead, &empty_token, void_cmp_token_type))
+ res = add_to_lookahead(&item->lookahead, &new->lookahead);
+ return (res);
+}
+
+t_ft_stat add_predictions(t_vec *closure, const t_lr1_item *item, const t_vec *rules, const t_vec *tokens)
+{
+ size_t i;
+ t_lr1_item new_item;
+ t_ft_stat res;
+
+ i = 0;
+ while (i < rules->size)
+ {
+ new_item.core.rule = ft_vec_caccess(rules, i);
+ if (!cmp_token_type(&new_item.core.rule->result, get_next_token(&item->core)))
+ {
+ new_item.core.position = 0;
+ res = add_lookahead(&new_item, item, rules, tokens);
+ if (res != success)
+ return (res);
+ res = ft_vec_append(closure, &new_item);
+ if (res != success)
+ {
+ free_item(&new_item);
+ return (res);
+ }
+ }
+ ++i;
+ }
return (success);
}
while (i < kernel->size)
{
item = ft_vec_caccess(kernel, i);
- res = add_prediction(closure, item, rules);
+ res = add_predictions(closure, item, rules, tokens);
if (res != success)
return (res);
++i;
while (i < closure->size)
{
item = ft_vec_caccess(closure, i);
- res = add_prediction(closure, item, rules);
+ res = add_predictions(closure, item, rules, tokens);
if (res != success)
return (res);
++i;