diff --git a/srcs/parsing/tokenizer/tokenizer.c b/srcs/parsing/tokenizer/tokenizer.c
index 02c3712..7a0378a 100644
--- a/srcs/parsing/tokenizer/tokenizer.c
+++ b/srcs/parsing/tokenizer/tokenizer.c
@@ -89,7 +89,7 @@ static int stick_quote_node(t_node *head, char q)
 	it = head;
 	while (it != NULL)
 	{
-		if (ft_strchr(it->val, q))
+		if (ft_strchr(it->val, q) && ft_strchr(it->val, q) == ft_strrchr(it->val, q))
 		{
 			while (it->next && !ft_strchr(it->next->val, q))
 				if (!merge_with_next_node(it))
@@ -152,19 +152,19 @@ t_node *tokenize(char *str)
 	head = tokenize_base(str);
 	if (!head)
 		return (NULL);
-	// debug_token_list(head, "tokenize_base");
+	debug_token_list(head, "tokenize_base");
 	if (!trim_nodes(head))
 		return (NULL);
-	// debug_token_list(head, "trim_nodes");
+	debug_token_list(head, "trim_nodes");
 	if (!unstick_nodes(head))
 		return (NULL);
-	// debug_token_list(head, "unstick_nodes");
+	debug_token_list(head, "unstick_nodes");
 	stick_quote_node(head, 39);
 	stick_quote_node(head, '"');
-	// debug_token_list(head, "stick quote node");
+	debug_token_list(head, "stick quote node");
 	if (!trim_nodes(head))
 		return (NULL);
-	// debug_token_list(head, "trim_nodes");
+	debug_token_list(head, "trim_nodes");
 	set_token(head);
 	del_void_nodes(&head);
 	debug_token_list(head, "tokenizer");