parser: merge quoted tokens and split stuck operators (version number TBD)

This commit is contained in:
Nathan Lebrun
2025-01-20 13:24:33 +01:00
parent 81e0c7c4ee
commit b319c2129f
6 changed files with 178 additions and 73 deletions

View File

@@ -3,19 +3,21 @@
/* ::: :::::::: */
/* parse.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2025/01/15 08:23:41 by nalebrun #+# #+# */
/* Updated: 2025/01/15 08:23:41 by nalebrun ### ########.fr */
/* Created: 2025/01/15 08:23:41 by nalebrun #+# #+# */
/* Updated: 2025/01/20 13:15:47 by nalebrun ### ########.fr */
/* */
/* ************************************************************************** */
#include "tokenizer/tokenizer.h"
void truncate_comment(char *str)
void truncate_comment(char *str)
{
int i;
int i;
if (!str)
return ;
i = 0;
while (str[i])
{
@@ -28,18 +30,18 @@ void truncate_comment(char *str)
}
}
int main (int ac, char **av)
int main(int ac, char **av)
{
t_node *lst;
char *expanded;
(void)ac;
t_node *lst;
truncate_comment(av[1]);
lst = tokenize(av[1]);
expanded = expander(av[1]);
lst = tokenize(expanded);
if (!lst)
return (1);
// debug_linked_list(lst);
return (free(expanded), 1);
ft_free(expanded);
debug_linked_list(lst, "Tokenized");
free_linked_list(lst);
// free la list wesh
}

View File

@@ -1,12 +1,12 @@
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* tokenizer_utils.c :+: :+: :+: */
/* linked_list.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2025/01/15 13:38:49 by nalebrun #+# #+# */
/* Updated: 2025/01/15 13:38:49 by nalebrun ### ########.fr */
/* Created: 2025/01/15 13:38:49 by nalebrun #+# #+# */
/* Updated: 2025/01/20 13:15:03 by nalebrun ### ########.fr */
/* */
/* ************************************************************************** */
@@ -41,20 +41,20 @@ int add_node_back(t_node *head, char *val, t_token token)
/*
** Frees every node of the token list, including each node's owned
** val string. Safe to call with head == NULL (loop body never runs).
*/
void	free_linked_list(t_node *head)
{
	t_node	*tmp;

	while (head)
	{
		tmp = head;
		head = head->next;
		free(tmp->val);
		free(tmp);
	}
}
int create_node_after(t_node *elem, char *val)
{
t_node *tmp_next;
t_node *tmp_next;
tmp_next = elem->next;
elem->next = create_node(val, 0);
@@ -64,25 +64,45 @@ int create_node_after(t_node *elem, char *val)
return (1);
}
/*
** Concatenates node->next->val onto node->val and unlinks/frees the
** next node. Returns 1 on success, 0 on allocation failure or when
** there is no next node to merge (the list is left untouched then).
*/
int	merge_with_next_node(t_node *node)
{
	char	*joined;
	t_node	*after;

	/* Guard: callers (e.g. quote merging) may reach the tail. */
	if (!node || !node->next)
		return (0);
	joined = ft_strjoin(node->val, node->next->val);
	if (!joined)
		return (0);
	ft_free(&node->val);
	node->val = joined;
	ft_free(&node->next->val);
	after = node->next->next;
	free(node->next);
	node->next = after;
	return (1);
}
/* NOTE: debug helper — remove before release. */
/*
** Prints every node of the token list with a padded token-kind label.
** Uses string literals instead of ft_strdup: the original duplicated a
** constant per node and did not check for NULL (printf("%s", NULL) is
** undefined behavior on strdup failure).
*/
void	debug_linked_list(t_node *head, char *msg)
{
	t_node		*current;
	const char	*token;

	current = head;
	printf("----------------------------------------------------------{%s} \n",
		msg);
	while (current != NULL)
	{
		if (current->token == OPERATOR)
			token = "OPERATOR";
		else if (current->token == WORD)
			token = "WORD    ";
		else if (current->token == UNSET)
			token = "UNSET   ";
		else
			token = "???     ";
		printf("| Node - TOKEN: %s -> val: |%s|\n", token, current->val);
		current = current->next;
	}
	printf("----------------------------------------------------------\n\n");
}

View File

@@ -3,10 +3,10 @@
/* ::: :::::::: */
/* tokenizer.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2025/01/15 13:27:57 by nalebrun #+# #+# */
/* Updated: 2025/01/15 13:27:57 by nalebrun ### ########.fr */
/* Created: 2025/01/15 13:27:57 by nalebrun #+# #+# */
/* Updated: 2025/01/20 13:15:25 by nalebrun ### ########.fr */
/* */
/* ************************************************************************** */
@@ -37,7 +37,7 @@ static t_node *tokenize_base(char *str)
static void set_token(t_node *head)
{
t_node *it;
t_node *it;
it = head;
while (it != NULL)
@@ -49,18 +49,47 @@ static void set_token(t_node *head)
/*
** Splits each node whose val mixes meta and non-meta characters into
** two consecutive nodes: the leading run (meta run if val starts with
** a meta char, non-meta run otherwise) stays in the node; the rest
** goes into a freshly inserted node after it.
** Returns 1 on success, 0 on allocation / insertion failure.
*/
static int	unstick_nodes(t_node *head)
{
	t_node	*it;
	char	*first_str;
	char	*second_str;
	int		copied;

	it = head;
	while (it != NULL)
	{
		if (is_sticked(it->val))
		{
			if (is_meta(it->val[0]))
				first_str = copy_meta_xor(it->val, &copied, 0);
			else
				first_str = copy_meta_xor(it->val, &copied, 1);
			if (!first_str)
				return (0);
			second_str = ft_substr(it->val, copied,
					ft_strlen(it->val) - copied);
			if (!second_str)
				return (ft_free(&first_str), 0);
			/* Take ownership of first_str directly: the original
			** ft_strdup'ed it and freed it, an extra unchecked
			** allocation for no benefit. */
			ft_free(&it->val);
			it->val = first_str;
			/* create_node_after copies second_str (the original freed
			** it afterwards), so check its result then release it. */
			if (!create_node_after(it, second_str))
				return (ft_free(&second_str), 0);
			ft_free(&second_str);
		}
		it = it->next;
	}
	return (1);
}
static int stick_quote_node(t_node *head)
{
t_node *it;
it = head;
while (it != NULL)
{
if (it->val[0] == '"')
{
while (it->next->val[0] != '"')
if (!merge_with_next_node(it))
return (0);
if (!merge_with_next_node(it))
return (0);
}
it = it->next;
}
@@ -74,13 +103,9 @@ t_node *tokenize(char *str)
head = tokenize_base(str);
if (!head)
return (NULL);
debug_linked_list(head, "base tokenized");
if (!unstick_nodes(head))
return (NULL);
debug_linked_list(head, "nodes unsticked");
// stick_quote_node(head);
// debug_linked_list(head);
stick_quote_node(head);
set_token(head);
debug_linked_list(head, "token set");
return (head);
}

View File

@@ -1,16 +1,16 @@
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* parser.h :+: :+: :+: */
/* tokenizer.h :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* By: nalebrun <nalebrun@student.s19.be> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2025/01/15 13:30:12 by nalebrun #+# #+# */
/* Updated: 2025/01/15 13:30:12 by nalebrun ### ########.fr */
/* Created: 2025/01/15 13:30:12 by nalebrun #+# #+# */
/* Updated: 2025/01/20 13:15:34 by nalebrun ### ########.fr */
/* */
/* ************************************************************************** */
#ifndef TOKENIZER_H
#ifndef TOKENIZER_H
# define TOKENIZER_H
# include "../includes/minishell.h"
@@ -20,7 +20,7 @@ typedef enum e_token
UNSET,
OPERATOR,
WORD
} t_token;
} t_token;
typedef struct s_node
{
@@ -29,12 +29,17 @@ typedef struct s_node
enum e_token token;
} t_node;
t_node *tokenize(char *str);
t_node *create_node(char *val, t_token token);
int add_node_back(t_node *head, char *val, t_token token);
void free_linked_list(t_node *stack);
t_token get_token(char *str);
int create_node_after(t_node *elem, char *val);
void debug_linked_list(t_node *head, char *msg);
t_node *tokenize(char *str);
t_node *create_node(char *val, t_token token);
int add_node_back(t_node *head, char *val, t_token token);
int merge_with_next_node(t_node *node);
void free_linked_list(t_node *stack);
t_token get_token(char *str);
int create_node_after(t_node *elem, char *val);
char *copy_meta_xor(char *val, int *copied, int rev);
int is_meta(char c);
int is_sticked(char *val);
void debug_linked_list(t_node *head, char *msg);
#endif

View File

@@ -2,13 +2,43 @@
/*
** Classifies a token string: OPERATOR when it starts with a shell
** control character, WORD otherwise (including NULL, which previously
** crashed in strncmp). strncmp(str, "&", 1) only ever compared the
** first byte, so a direct character comparison is equivalent and clearer.
*/
t_token	get_token(char *str)
{
	if (str && (str[0] == '&' || str[0] == '|' || str[0] == '('
			|| str[0] == ')' || str[0] == '<' || str[0] == '>'))
		return (OPERATOR);
	return (WORD);
}
/*
** Returns 1 when c is a tokenizer meta character (shell control
** characters plus the double quote), 0 otherwise.
*/
int	is_meta(char c)
{
	switch (c)
	{
	case '&':
	case '|':
	case '<':
	case '>':
	case '(':
	case ')':
	case '"':
		return (1);
	default:
		return (0);
	}
}
/*
** Returns 1 when val contains BOTH meta and non-meta characters
** ("stuck" token that must be split), 0 otherwise (including NULL,
** which previously dereferenced a null pointer).
** Exits as soon as both kinds have been seen instead of always
** scanning the whole string, and calls is_meta once per character
** instead of twice.
*/
int	is_sticked(char *val)
{
	int	i;
	int	has_meta;
	int	has_plain;

	if (!val)
		return (0);
	has_meta = 0;
	has_plain = 0;
	i = 0;
	while (val[i])
	{
		if (is_meta(val[i]))
			has_meta = 1;
		else
			has_plain = 1;
		if (has_meta && has_plain)
			return (1);
		i++;
	}
	return (0);
}

View File

@@ -0,0 +1,23 @@
#include "tokenizer.h"
char *copy_meta_xor(char *val, int *copied, int rev)
{
int i;
int j;
char *out;
i = 0;
while (is_meta(val[i]) ^ rev)
{
if (!rev && val[i] != val[0])
break ;
i++;
}
*copied = i;
out = malloc(i + 1);
j = -1;
while (++j < i)
out[j] = val[j];
out[i] = 0;
return (out);
}