path: root/src/tokenize.c
author     koekeishiya <aasvi93@hotmail.com>    2017-09-12 12:43:38 +0200
committer  koekeishiya <aasvi93@hotmail.com>    2017-09-12 12:43:38 +0200
commit     07f864c03ecdfbbc761cdfa8cf54321ba17bec47 (patch)
tree       1275e554d48a3e43877843340b9f03cc763932a8 /src/tokenize.c
parent     1723966a65c5fd591aca85ed427e53674bae056e (diff)
download   skhd-07f864c03ecdfbbc761cdfa8cf54321ba17bec47.tar.gz
           skhd-07f864c03ecdfbbc761cdfa8cf54321ba17bec47.zip
code cleanup
Diffstat (limited to 'src/tokenize.c')
-rw-r--r--  src/tokenize.c  89
1 file changed, 42 insertions(+), 47 deletions(-)
diff --git a/src/tokenize.c b/src/tokenize.c
index 28a5336..4feca6d 100644
--- a/src/tokenize.c
+++ b/src/tokenize.c
@@ -121,54 +121,49 @@ get_token(struct tokenizer *tokenizer)
c = *token.text;
advance(tokenizer);
- switch(c)
- {
- case '\0': { token.type = Token_EndOfStream; } break;
- case '+': { token.type = Token_Plus; } break;
- case '-':
- {
- if(*tokenizer->at && *tokenizer->at == '>') {
- advance(tokenizer);
- token.length = tokenizer->at - token.text;
- token.type = Token_Arrow;
- } else {
- token.type = Token_Dash;
- }
- } break;
- case ':':
- {
- eat_whitespace(tokenizer);
-
- token.text = tokenizer->at;
- token.line = tokenizer->line;
- token.cursor = tokenizer->cursor;
-
- eat_command(tokenizer);
+ switch(c) {
+ case '\0':{ token.type = Token_EndOfStream; } break;
+ case '+': { token.type = Token_Plus; } break;
+ case '#': {
+ eat_comment(tokenizer);
+ token = get_token(tokenizer);
+ } break;
+ case '-': {
+ if(*tokenizer->at && *tokenizer->at == '>') {
+ advance(tokenizer);
+ token.length = tokenizer->at - token.text;
+ token.type = Token_Arrow;
+ } else {
+ token.type = Token_Dash;
+ }
+ } break;
+ case ':': {
+ eat_whitespace(tokenizer);
+
+ token.text = tokenizer->at;
+ token.line = tokenizer->line;
+ token.cursor = tokenizer->cursor;
+
+ eat_command(tokenizer);
+ token.length = tokenizer->at - token.text;
+ token.type = Token_Command;
+ } break;
+ default: {
+ if(c == '0' && *tokenizer->at == 'x') {
+ advance(tokenizer);
+ eat_hex(tokenizer);
+ token.length = tokenizer->at - token.text;
+ token.type = Token_Key_Hex;
+ } else if(isdigit(c)) {
+ token.type = Token_Key;
+ } else if(isalpha(c)) {
+ eat_identifier(tokenizer);
token.length = tokenizer->at - token.text;
- token.type = Token_Command;
- } break;
- case '#':
- {
- eat_comment(tokenizer);
- token = get_token(tokenizer);
- } break;
- default:
- {
- if(c == '0' && *tokenizer->at == 'x') {
- advance(tokenizer);
- eat_hex(tokenizer);
- token.length = tokenizer->at - token.text;
- token.type = Token_Key_Hex;
- } else if(isdigit(c)) {
- token.type = Token_Key;
- } else if(isalpha(c)) {
- eat_identifier(tokenizer);
- token.length = tokenizer->at - token.text;
- token.type = resolve_identifier_type(token);
- } else {
- token.type = Token_Unknown;
- }
- } break;
+ token.type = resolve_identifier_type(token);
+ } else {
+ token.type = Token_Unknown;
+ }
+ } break;
}
return token;
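
For context (not part of the commit): a minimal sketch of how a caller might loop over get_token() from this file. Only get_token, the token's text/length/type fields, and the Token_EndOfStream / Token_Command values appear in the hunk above; the tokenize.h header name, the tokenizer_init helper, and the config_text buffer are assumptions for illustration.

#include <stdio.h>
#include "tokenize.h"   /* assumed header declaring struct tokenizer / get_token */

static void dump_commands(char *config_text)
{
    struct tokenizer tokenizer;
    tokenizer_init(&tokenizer, config_text);   /* assumed init helper */

    for (;;) {
        struct token token = get_token(&tokenizer);
        if (token.type == Token_EndOfStream) break;

        if (token.type == Token_Command) {
            /* token.text is a slice into the buffer, not null-terminated,
               so print it bounded by token.length */
            printf("command: %.*s\n", (int)token.length, token.text);
        }
    }
}

Note how the '#' case in the diff keeps comment handling transparent to such a caller: get_token() eats the comment and recurses, so the loop never sees a comment token.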