diff --git a/lib/libft/srcs/format/ft_tabstr.c b/lib/libft/srcs/format/ft_tabstr.c
index 89b8dca..f7c55a3 100644
--- a/lib/libft/srcs/format/ft_tabstr.c
+++ b/lib/libft/srcs/format/ft_tabstr.c
@@ -19,6 +19,8 @@ char	*ft_tabstr(char **tab)
 	char	*out;
 	char	*tmp;
 
+	if (!tab)
+		return (NULL);
 	i = -1;
 	alloc_count = 0;
 	while (tab[++i])
diff --git a/srcs/parsing/drawio/drawio_ast_utils.c b/srcs/parsing/drawio/drawio_ast_utils.c
index f15dae1..96e2f9e 100644
--- a/srcs/parsing/drawio/drawio_ast_utils.c
+++ b/srcs/parsing/drawio/drawio_ast_utils.c
@@ -41,7 +41,10 @@ t_dio_node	get_cmd_txt(t_ast_n *node)
 {
 	txt.cmd = ft_sprintf("%s%s", NL, node->cmd);
 	txt.cmd = replace_left_red(txt.cmd);
-	args = ft_tabstr(node->args);
+	if (node->args && node->args[0])
+		args = ft_tabstr(node->args);
+	else
+		args = ft_strdup("");
 	txt.args = ft_sprintf("%s%s%s", NL, args, NL);
 	free(args);
 	txt.files = ft_sprintf("redir: UNCHECKED\n");
diff --git a/srcs/parsing/heredoc.c b/srcs/parsing/heredoc.c
index f81df06..1e5b369 100644
--- a/srcs/parsing/heredoc.c
+++ b/srcs/parsing/heredoc.c
@@ -13,7 +13,7 @@ void	read_hereinput(char *limiter)
 	r = read(0, &c, 1);
 	if (r == 0)
 	{
-		ft_fprintf (2, "etdsttdt\n");
+		ft_fprintf (2, "\n");
 		exit(EXIT_SUCCESS);
 	}
 	while (r && c != '\n' && c != '\0')
diff --git a/srcs/parsing/tokenizer/tokenizer.c b/srcs/parsing/tokenizer/tokenizer.c
index 73c991c..02c3712 100644
--- a/srcs/parsing/tokenizer/tokenizer.c
+++ b/srcs/parsing/tokenizer/tokenizer.c
@@ -152,19 +152,19 @@ t_node	*tokenize(char *str)
 	head = tokenize_base(str);
 	if (!head)
 		return (NULL);
-	debug_token_list(head, "tokenize_base");
+	// debug_token_list(head, "tokenize_base");
 	if (!trim_nodes(head))
 		return (NULL);
-	debug_token_list(head, "trim_nodes");
+	// debug_token_list(head, "trim_nodes");
 	if (!unstick_nodes(head))
 		return (NULL);
-	debug_token_list(head, "unstick_nodes");
+	// debug_token_list(head, "unstick_nodes");
 	stick_quote_node(head, 39);
 	stick_quote_node(head, '"');
-	debug_token_list(head, "stick quote node");
+	// debug_token_list(head, "stick quote node");
 	if (!trim_nodes(head))
 		return (NULL);
-	debug_token_list(head, "trim_nodes");
+	// debug_token_list(head, "trim_nodes");
 	set_token(head);
 	del_void_nodes(&head);
 	debug_token_list(head, "tokenizer");