@@ -283,26 +283,18 @@ public fn void Tokenizer.init(Tokenizer* t,
                              const string_list.List* features,
                              bool raw_mode)
 {
+    string.memset(t, 0, sizeof(Tokenizer));
     t.cur = input;
     t.loc_start = loc_start;
     t.input_start = input;
     t.kwinfo = kwinfo;

-    for (u32 i=0; i<MaxLookahead; i++) {
-        t.next[i].init();
-    }
-    t.next_count = 0;
-    t.next_head = 0;
     t.line_start = input;
     t.pool = pool;
     t.buf = buf;

-    string.memset(&t.feature_stack, 0, sizeof(t.feature_stack));
-    t.feature_count = 0;
     t.features = features;
     t.raw_mode = raw_mode;
-    t.stop_at_eol = false;
-    t.error_msg[0] = 0;
 }

 public fn void Tokenizer.lex(Tokenizer* t, Token* result) {
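
Note on the hunk above: once the whole struct is zeroed, every deleted line is a redundant zero-initialization, and any field added to Tokenizer later starts cleared without touching init(). This assumes, as the hunk implies, that Token.init() does nothing beyond clearing the token, so the removed t.next[i].init() loop is covered by the memset as well. A minimal sketch of the resulting pattern (not part of the commit):

    string.memset(t, 0, sizeof(Tokenizer));  // all fields -> 0/false/nil
    t.cur = input;                           // then assign only non-zero state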
@@ -320,6 +312,7 @@ fn void Tokenizer.lex_internal(Tokenizer* t, Token* result) {
     // TODO if end/error stop (dont retry) (t.done = 1)

     while (1) {
+        result.init();
         result.loc = t.loc_start + cast<SrcLoc>(t.cur - t.input_start);
         Action act = Char_lookup[cast<u8>(*t.cur)];
         switch (act) {
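
Note: result.init() now runs at the top of every pass through the retry loop, so when an iteration consumes something without producing a token (whitespace, a comment, a preprocessor line) no stale fields survive into the token eventually returned; this also appears to be what replaces the one-time t.next[i].init() loop deleted from Tokenizer.init() above.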
@@ -650,7 +643,7 @@ fn void Tokenizer.lex_internal(Tokenizer* t, Token* result) {
                 return;
             }
             result.kind = Kind.Eof;
-            result.more = false;
+            result.done = true;
             return;
         }
     }
@@ -692,7 +685,7 @@ fn void Tokenizer.error(Tokenizer* t, Token* result, const char* format @(printf
     result.loc = t.loc_start + cast<SrcLoc>(t.cur - t.input_start);
     result.kind = Kind.Error;
     result.error_msg = t.error_msg;
-    result.more = false;
+    result.done = true;
     result.has_error = true;
 }

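Note on the more -> done rename (this hunk and the Eof hunk above): the flag's polarity flips from "more tokens follow" to "tokenization has finished", and it is set only at end-of-file and on error. A hedged sketch of a consumer loop under the new flag (process() is a hypothetical consumer, not part of this commit):

    Token tok;
    while (1) {
        t.lex(&tok);
        process(&tok);        // hypothetical
        if (tok.done) break;  // was: if (!tok.more) break;
    }
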
@@ -1006,7 +999,7 @@ too_large:
 // Returns how much to shift in source code (0 = error)
 fn u32 Tokenizer.lex_escaped_char(Tokenizer* t, Token* result, const char* stype) {
     // Note: t.cur is on '\'
-    const char* input = t.cur + 1; // skip backspace
+    const char* input = t.cur + 1; // skip backslash
     switch (input[0]) {
     case 0:
     case '\r':
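
Note: the comment fix uses the correct term: the backslash '\' introduces an escape sequence, while backspace is merely one of the characters such a sequence can denote ('\b').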
@@ -1139,7 +1132,6 @@ fn void Tokenizer.lex_char_literal(Tokenizer* t, Token* result) {
         t.error(result, "multi-character character constant");
     } else {
         t.error(result, "missing terminating ' character (GOT %c)", *t.cur);
-        //t.error(result, "missing terminating ' character");
     }
     return;
 }
@@ -1357,36 +1349,35 @@ type Operand struct {
     u8 prec;
 }

-fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
+fn i64 Tokenizer.parse_ppexpr(Tokenizer* t, Token *result) {
     Operand[MAX_LEVEL] stack;
     Operand *sp;
     Kind op;
     u8 prec;
     i64 val = 0;
-    Token tok;
     bool prefix = true;

     for (sp = stack;;) {
-        op = t.lex_preproc(&tok);
+        op = t.lex_preproc(result);
         if (prefix) {
             switch (op) {
             case Identifier:
                 val = 0;
-                const char *id = t.pool.idx2str(tok.text_idx);
+                const char *id = t.pool.idx2str(result.text_idx);
                 if (!string.strcmp(id, "defined")) {
                     bool has_paren = false;
-                    if (t.lex_preproc(&tok) == Kind.LParen) {
+                    if (t.lex_preproc(result) == Kind.LParen) {
                         has_paren = true;
-                        t.lex_preproc(&tok);
+                        t.lex_preproc(result);
                     }
-                    if (tok.kind == Kind.Identifier) {
-                        id = t.pool.idx2str(tok.text_idx);
+                    if (result.kind == Kind.Identifier) {
+                        id = t.pool.idx2str(result.text_idx);
                     } else {
-                        t.error(&tok, "missing identifier after 'defined'");
+                        t.error(result, "missing identifier after 'defined'");
                         return 0;
                     }
                     if (has_paren) {
-                        if (t.lex_preproc(&tok) != Kind.RParen)
+                        if (t.lex_preproc(result) != Kind.RParen)
                             goto syntax_error;
                     }
                     val = t.features.contains(id);
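
Note on the 'defined' handling above: both spellings, with and without parentheses, are accepted, and the operand's value is simply whether the identifier is in the configured feature list. Illustrative directive inputs (hypothetical feature name; the Kind.Feat_* tokens suggest C2's feature-selection directives):

    #if defined FAST_MATH            // val = 1 if FAST_MATH is a known feature
    #if defined(FAST_MATH)           // parenthesized form, same result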
@@ -1398,11 +1389,11 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
                 continue;
             case IntegerLiteral:
                 // TODO: handle signed/unsigned issues
-                val = cast<i64>(tok.int_value);
+                val = cast<i64>(result.int_value);
                 prefix = false;
                 continue;
             case CharLiteral:
-                val = tok.char_value;
+                val = result.char_value;
                 prefix = false;
                 continue;
             case LParen:
@@ -1423,15 +1414,15 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
             default:
                 break;
             }
-            t.error(&tok, "missing operand in preprocessor expression");
+            t.error(result, "missing operand in preprocessor expression");
             return 0;
         }
         switch (op) {
         case Identifier:
         case IntegerLiteral:
         case CharLiteral:
         case LParen:
-            t.error(&tok, "missing operator in preprocessor expression");
+            t.error(result, "missing operator in preprocessor expression");
             return 0;
         default:
             break;
@@ -1510,8 +1501,7 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
             prec = 13;
             break;
         default:
-            t.error(&tok, "invalid token in preprocessor expression '%s'",
-                tok.kind.str());
+            t.error(result, "invalid token in preprocessor expression '%s'", result.kind.str());
             return 0;
         }

@@ -1521,7 +1511,7 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
         switch (sp.op) {
         case LParen:
             if (op != Kind.RParen) {
-                t.error(&tok, "missing parenthesis in preprocessor expression");
+                t.error(result, "missing parenthesis in preprocessor expression");
                 return 0;
             }
             op = Kind.None;
@@ -1589,8 +1579,7 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
             }
             fallthrough;
         default:
-            t.error(&tok, "invalid token in preprocessor expression '%s'",
-                sp.op.str());
+            t.error(result, "invalid token in preprocessor expression '%s'", sp.op.str());
             return 0;
         }
         break;
@@ -1599,7 +1588,7 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
         break;
         if (sp >= stack + MAX_LEVEL) {
 too_deep:
-            t.error(&tok, "preprocessor expression too complex");
+            t.error(result, "preprocessor expression too complex");
             return 0;
         }
         sp.val = val;
@@ -1609,7 +1598,7 @@ fn i64 Tokenizer.parse_ppexpr(Tokenizer* t) {
     }
     if (sp > stack) {
 syntax_error:
-        t.error(&tok, "syntax error in preprocessor expression");
+        t.error(result, "syntax error in preprocessor expression");
         return 0;
     }
     return val;
@@ -1650,7 +1639,7 @@ fn bool Tokenizer.handle_if(Tokenizer* t, Token* result, Kind kind) {
     }

     if (kind == Kind.Feat_if || kind == Kind.Feat_elif) {
-        if (!t.parse_ppexpr())
+        if (!t.parse_ppexpr(result))
             top.skipping = 1;
     } else {
         /* handle Kind.Feat_ifdef, Kind.Feat_ifndef */
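
Note on the final hunk: parse_ppexpr() returns 0 both when the expression evaluates to false and when it fails to parse, so handle_if() starts skipping in either case. The difference after this change is that on failure the diagnostic has already been written into the caller-supplied result token via t.error(result, ...), which is what threading result through parse_ppexpr() instead of a local Token buys: the error token reaches the caller without any extra copying.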