tokenizer v2
@@ -25,8 +25,13 @@ typedef struct s_node
t_node *tokenize(char *str);
t_node *create_node(char *token, int priority, int depth);
-void add_node_back(t_node* head, int depth, char *token);
+void add_node_back(t_node *head, int depth, char *token);
void ajust_depth(int *depth, char c);
int get_priority(char *token);
char *copy_token_string(char *start, char last_token);
int goto_next_token(char *str);
int skip_meta_token(char *str);
int is_meta_token(char c);
int go_to_next_meta_token(char *str);

#endif
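A minimal sketch of how these declarations could fit together. The hunk shows only the prototypes, so the fields of s_node (token, priority, depth, next) and the precedence values returned by get_priority below are assumptions, not taken from the commit:

#include <stdlib.h>
#include <string.h>

typedef struct s_node
{
	char			*token;		/* assumed field: token text */
	int				priority;	/* assumed field: operator precedence */
	int				depth;		/* assumed field: parenthesis nesting level */
	struct s_node	*next;		/* assumed field: singly linked list link */
}	t_node;

/* Possible shape of create_node, matching the declared prototype:
 * allocate a node and initialise it with the given values. */
t_node	*create_node(char *token, int priority, int depth)
{
	t_node	*node;

	node = malloc(sizeof(t_node));
	if (!node)
		return (NULL);
	node->token = token;
	node->priority = priority;
	node->depth = depth;
	node->next = NULL;
	return (node);
}

/* Hypothetical priority table; the real mapping is not shown in the diff. */
int	get_priority(char *token)
{
	if (!strcmp(token, "&&") || !strcmp(token, "||"))
		return (2);
	if (!strcmp(token, "|"))
		return (1);
	return (0);
}

/* Possible shape of add_node_back: walk to the tail of the list and
 * append a freshly created node for the new token. */
void	add_node_back(t_node *head, int depth, char *token)
{
	t_node	*cur;

	if (!head)
		return ;
	cur = head;
	while (cur->next)
		cur = cur->next;
	cur->next = create_node(token, get_priority(token), depth);
}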