about summary refs log tree commit diff
path: root/src/tokenize.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/tokenize.c')
-rw-r--r--    src/tokenize.c    44
1 file changed, 22 insertions(+), 22 deletions(-)
diff --git a/src/tokenize.c b/src/tokenize.c
index 645b79f..691b9c8 100644
--- a/src/tokenize.c
+++ b/src/tokenize.c
@@ -7,8 +7,8 @@
int token_equals(struct token token, const char *match)
{
const char *at = match;
- for(int i = 0; i < token.length; ++i, ++at) {
- if((*at == 0) || (token.text[i] != *at)) {
+ for (int i = 0; i < token.length; ++i, ++at) {
+ if ((*at == 0) || (token.text[i] != *at)) {
return false;
}
}
@@ -18,7 +18,7 @@ int token_equals(struct token token, const char *match)
internal void
advance(struct tokenizer *tokenizer)
{
- if(*tokenizer->at == '\n') {
+ if (*tokenizer->at == '\n') {
tokenizer->cursor = 0;
++tokenizer->line;
}
@@ -29,7 +29,7 @@ advance(struct tokenizer *tokenizer)
internal void
eat_whitespace(struct tokenizer *tokenizer)
{
- while(*tokenizer->at && isspace(*tokenizer->at)) {
+ while (*tokenizer->at && isspace(*tokenizer->at)) {
advance(tokenizer);
}
}
@@ -37,7 +37,7 @@ eat_whitespace(struct tokenizer *tokenizer)
internal void
eat_comment(struct tokenizer *tokenizer)
{
- while(*tokenizer->at && *tokenizer->at != '\n') {
+ while (*tokenizer->at && *tokenizer->at != '\n') {
advance(tokenizer);
}
}
@@ -45,8 +45,8 @@ eat_comment(struct tokenizer *tokenizer)
internal void
eat_command(struct tokenizer *tokenizer)
{
- while(*tokenizer->at && *tokenizer->at != '\n') {
- if(*tokenizer->at == '\\') {
+ while (*tokenizer->at && *tokenizer->at != '\n') {
+ if (*tokenizer->at == '\\') {
advance(tokenizer);
}
advance(tokenizer);
@@ -56,9 +56,9 @@ eat_command(struct tokenizer *tokenizer)
internal void
eat_hex(struct tokenizer *tokenizer)
{
- while((*tokenizer->at) &&
- ((isdigit(*tokenizer->at)) ||
- (*tokenizer->at >= 'A' && *tokenizer->at <= 'F'))) {
+ while ((*tokenizer->at) &&
+ ((isdigit(*tokenizer->at)) ||
+ (*tokenizer->at >= 'A' && *tokenizer->at <= 'F'))) {
advance(tokenizer);
}
}
@@ -66,11 +66,11 @@ eat_hex(struct tokenizer *tokenizer)
internal void
eat_identifier(struct tokenizer *tokenizer)
{
- while((*tokenizer->at) && isalpha(*tokenizer->at)) {
+ while ((*tokenizer->at) && isalpha(*tokenizer->at)) {
advance(tokenizer);
}
- while((*tokenizer->at) && isdigit(*tokenizer->at)) {
+ while ((*tokenizer->at) && isdigit(*tokenizer->at)) {
advance(tokenizer);
}
}
@@ -78,18 +78,18 @@ eat_identifier(struct tokenizer *tokenizer)
internal enum token_type
resolve_identifier_type(struct token token)
{
- if(token.length == 1) {
+ if (token.length == 1) {
return Token_Key;
}
- for(int i = 0; i < array_count(modifier_flags_str); ++i) {
- if(token_equals(token, modifier_flags_str[i])) {
+ for (int i = 0; i < array_count(modifier_flags_str); ++i) {
+ if (token_equals(token, modifier_flags_str[i])) {
return Token_Modifier;
}
}
- for(int i = 0; i < array_count(literal_keycode_str); ++i) {
- if(token_equals(token, literal_keycode_str[i])) {
+ for (int i = 0; i < array_count(literal_keycode_str); ++i) {
+ if (token_equals(token, literal_keycode_str[i])) {
return Token_Literal;
}
}
@@ -118,7 +118,7 @@ get_token(struct tokenizer *tokenizer)
c = *token.text;
advance(tokenizer);
- switch(c) {
+ switch (c) {
case '\0':{ token.type = Token_EndOfStream; } break;
case '+': { token.type = Token_Plus; } break;
case '#': {
@@ -126,7 +126,7 @@ get_token(struct tokenizer *tokenizer)
token = get_token(tokenizer);
} break;
case '-': {
- if(*tokenizer->at && *tokenizer->at == '>') {
+ if (*tokenizer->at && *tokenizer->at == '>') {
advance(tokenizer);
token.length = tokenizer->at - token.text;
token.type = Token_Arrow;
@@ -146,14 +146,14 @@ get_token(struct tokenizer *tokenizer)
token.type = Token_Command;
} break;
default: {
- if(c == '0' && *tokenizer->at == 'x') {
+ if (c == '0' && *tokenizer->at == 'x') {
advance(tokenizer);
eat_hex(tokenizer);
token.length = tokenizer->at - token.text;
token.type = Token_Key_Hex;
- } else if(isdigit(c)) {
+ } else if (isdigit(c)) {
token.type = Token_Key;
- } else if(isalpha(c)) {
+ } else if (isalpha(c)) {
eat_identifier(tokenizer);
token.length = tokenizer->at - token.text;
token.type = resolve_identifier_type(token);