Skip to content

Commit 33bfd69

Browse files
committed
syntax annoyance: a "for" following a toggle consumed a valid for loop; now peek 2 tokens ahead to check for an "in" so for-in loops are not consumed; also updated peek to ignore whitespace :/
1 parent be96dee commit 33bfd69

4 files changed

Lines changed: 295 additions & 3 deletions

File tree

src/core/tokenizer.js

Lines changed: 11 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -154,8 +154,17 @@ export class Tokens {
154154
peekToken(value, peek, type) {
155155
peek = peek || 0;
156156
type = type || "IDENTIFIER";
157-
if (this.#tokens[peek] && this.#tokens[peek].value === value && this.#tokens[peek].type === type) {
158-
return this.#tokens[peek];
157+
let peekNoWhitespace = 0;
158+
while (peek > 0) {
159+
peekNoWhitespace++;
160+
if (this.#tokens[peekNoWhitespace]?.type !== "WHITESPACE") {
161+
peek--;
162+
}
163+
}
164+
if (this.#tokens[peekNoWhitespace] &&
165+
this.#tokens[peekNoWhitespace].value === value &&
166+
this.#tokens[peekNoWhitespace].type === type) {
167+
return this.#tokens[peekNoWhitespace];
159168
}
160169
}
161170

src/parsetree/commands/dom.js

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -564,7 +564,8 @@ export class ToggleCommand extends VisibilityCommand {
564564
var evt = null;
565565
var from = null;
566566

567-
if (parser.matchToken("for")) {
567+
if (parser.peekToken("for") && !parser.peekToken("in", 2)) {
568+
parser.matchToken("for")
568569
time = parser.requireElement("expression");
569570
} else if (parser.matchToken("until")) {
570571
evt = parser.requireElement("dotOrColonPath", "Expected event name");

test/commands/toggle.js

Lines changed: 31 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -55,6 +55,37 @@ test.describe("the toggle command", () => {
5555
await expect(find('div')).not.toHaveClass(/foo/);
5656
});
5757

58+
test("toggle does not consume a following for-in loop", async ({html, find, page}) => {
59+
await html(
60+
"<div id='out'></div>" +
61+
"<div id='btn' _=\"on click " +
62+
" toggle .foo " +
63+
" for x in [1, 2, 3] " +
64+
" put x into #out " +
65+
" end\"></div>"
66+
);
67+
const btn = page.locator('#btn');
68+
await expect(btn).not.toHaveClass(/foo/);
69+
await btn.dispatchEvent('click');
70+
await expect(btn).toHaveClass(/foo/);
71+
await expect(find('#out')).toHaveText('3');
72+
});
73+
74+
test("toggle between followed by for-in loop works", async ({html, find, page}) => {
75+
await html(
76+
"<div id='out'></div>" +
77+
"<div id='btn' class='a' _=\"on click " +
78+
" toggle between .a and .b " +
79+
" for x in [1, 2] " +
80+
" put x into #out " +
81+
" end\"></div>"
82+
);
83+
const btn = page.locator('#btn');
84+
await btn.dispatchEvent('click');
85+
await expect(btn).toHaveClass(/b/);
86+
await expect(find('#out')).toHaveText('2');
87+
});
88+
5889
test("can toggle until an event on another element", async ({html, find, evaluate}) => {
5990
await html("<div id='d1'></div><div _='on click toggle .foo until foo from #d1'></div>");
6091
await expect(find('div:nth-of-type(2)')).not.toHaveClass(/foo/);

test/core/tokenizer.js

Lines changed: 251 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -395,3 +395,254 @@ test.describe("the _hyperscript tokenizer", () => {
395395
});
396396

397397
});
398+
399+
test.describe("the Tokens API", () => {
400+
401+
test("peekToken skips whitespace when looking ahead", async ({evaluate}) => {
402+
const results = await evaluate(() => {
403+
const t = _hyperscript.internals.tokenizer;
404+
const r = {};
405+
406+
// for x in items → tokens are: for, WS, x, WS, in, WS, items
407+
const forIn = t.tokenize("for x in items");
408+
r.peek0 = forIn.peekToken("for", 0)?.value ?? null;
409+
r.peek1 = forIn.peekToken("x", 1)?.value ?? null;
410+
r.peek2 = forIn.peekToken("in", 2)?.value ?? null;
411+
r.peek3 = forIn.peekToken("items", 3)?.value ?? null;
412+
413+
// peek that shouldn't match
414+
r.peekMiss = forIn.peekToken("in", 1) ?? null;
415+
416+
// for 10ms — "in" is never present
417+
const forDur = t.tokenize("for 10ms");
418+
r.durPeek2 = forDur.peekToken("in", 2) ?? null;
419+
420+
// Extra whitespace between tokens is tolerated
421+
const extraWs = t.tokenize("for   x  in    items");
422+
r.extraPeek2 = extraWs.peekToken("in", 2)?.value ?? null;
423+
424+
// Comments between tokens are tolerated
425+
const withComment = t.tokenize("for -- comment\nx in items");
426+
r.commentPeek2 = withComment.peekToken("in", 2)?.value ?? null;
427+
428+
// Newlines as whitespace
429+
const multiline = t.tokenize("for\nx\nin\nitems");
430+
r.multiPeek2 = multiline.peekToken("in", 2)?.value ?? null;
431+
432+
// Type defaults to IDENTIFIER — matching against an operator requires explicit type
433+
const withOp = t.tokenize("a + b");
434+
r.opDefault = withOp.peekToken("+", 1) ?? null; // IDENTIFIER type, won't match
435+
r.opExplicit = withOp.peekToken("+", 1, "PLUS")?.value ?? null;
436+
437+
// Lookahead past the end returns undefined
438+
const short = t.tokenize("foo");
439+
r.beyondEnd = short.peekToken("anything", 5) ?? null;
440+
441+
return r;
442+
});
443+
444+
expect(results.peek0).toBe("for");
445+
expect(results.peek1).toBe("x");
446+
expect(results.peek2).toBe("in");
447+
expect(results.peek3).toBe("items");
448+
expect(results.peekMiss).toBeNull();
449+
expect(results.durPeek2).toBeNull();
450+
expect(results.extraPeek2).toBe("in");
451+
expect(results.commentPeek2).toBe("in");
452+
expect(results.multiPeek2).toBe("in");
453+
expect(results.opDefault).toBeNull();
454+
expect(results.opExplicit).toBe("+");
455+
expect(results.beyondEnd).toBeNull();
456+
});
457+
458+
test("matchToken consumes and returns on match", async ({evaluate}) => {
459+
const results = await evaluate(() => {
460+
const t = _hyperscript.internals.tokenizer;
461+
const tokens = t.tokenize("foo bar baz");
462+
const r = {};
463+
r.match = tokens.matchToken("foo")?.value ?? null;
464+
r.miss = tokens.matchToken("baz") ?? null; // next is "bar", miss
465+
r.next = tokens.currentToken().value;
466+
r.match2 = tokens.matchToken("bar")?.value ?? null;
467+
return r;
468+
});
469+
expect(results.match).toBe("foo");
470+
expect(results.miss).toBeNull();
471+
expect(results.next).toBe("bar");
472+
expect(results.match2).toBe("bar");
473+
});
474+
475+
test("matchToken honors the follow set", async ({evaluate}) => {
476+
const results = await evaluate(() => {
477+
const t = _hyperscript.internals.tokenizer;
478+
const tokens = t.tokenize("and then");
479+
tokens.pushFollow("and");
480+
const blocked = tokens.matchToken("and") ?? null;
481+
tokens.popFollow();
482+
const allowed = tokens.matchToken("and")?.value ?? null;
483+
return {blocked, allowed};
484+
});
485+
expect(results.blocked).toBeNull();
486+
expect(results.allowed).toBe("and");
487+
});
488+
489+
test("matchOpToken matches operators by value", async ({evaluate}) => {
490+
const results = await evaluate(() => {
491+
const t = _hyperscript.internals.tokenizer;
492+
const tokens = t.tokenize("+ - *");
493+
return [
494+
tokens.matchOpToken("-") ?? null, // next is +, miss
495+
tokens.matchOpToken("+")?.value ?? null,
496+
tokens.matchOpToken("-")?.value ?? null,
497+
tokens.matchOpToken("*")?.value ?? null,
498+
];
499+
});
500+
expect(results[0]).toBeNull();
501+
expect(results[1]).toBe("+");
502+
expect(results[2]).toBe("-");
503+
expect(results[3]).toBe("*");
504+
});
505+
506+
test("matchTokenType matches by type", async ({evaluate}) => {
507+
const results = await evaluate(() => {
508+
const t = _hyperscript.internals.tokenizer;
509+
const tokens = t.tokenize("foo 42");
510+
const r = {};
511+
r.ident = tokens.matchTokenType("IDENTIFIER")?.value ?? null;
512+
r.numMiss = tokens.matchTokenType("STRING") ?? null;
513+
r.numOneOf = tokens.matchTokenType("STRING", "NUMBER")?.value ?? null;
514+
return r;
515+
});
516+
expect(results.ident).toBe("foo");
517+
expect(results.numMiss).toBeNull();
518+
expect(results.numOneOf).toBe("42");
519+
});
520+
521+
test("matchAnyToken and matchAnyOpToken try each option", async ({evaluate}) => {
522+
const results = await evaluate(() => {
523+
const t = _hyperscript.internals.tokenizer;
524+
const tokens = t.tokenize("bar + baz");
525+
return {
526+
anyTok: tokens.matchAnyToken("foo", "bar", "baz")?.value ?? null,
527+
anyOp: tokens.matchAnyOpToken("-", "+")?.value ?? null,
528+
anyTokMiss: tokens.matchAnyToken("foo", "quux") ?? null,
529+
};
530+
});
531+
expect(results.anyTok).toBe("bar");
532+
expect(results.anyOp).toBe("+");
533+
expect(results.anyTokMiss).toBeNull();
534+
});
535+
536+
test("consumeUntil collects tokens up to a marker", async ({evaluate}) => {
537+
const results = await evaluate(() => {
538+
const t = _hyperscript.internals.tokenizer;
539+
const tokens = t.tokenize("a b c end d");
540+
// consumeUntil collects every intervening token, whitespace included
541+
const collected = tokens.consumeUntil("end")
542+
.filter(tok => tok.type !== "WHITESPACE")
543+
.map(tok => tok.value);
544+
const landed = tokens.currentToken().value;
545+
return {collected, landed};
546+
});
547+
expect(results.collected).toEqual(["a", "b", "c"]);
548+
expect(results.landed).toBe("end");
549+
});
550+
551+
test("consumeUntilWhitespace stops at first whitespace", async ({evaluate}) => {
552+
const results = await evaluate(() => {
553+
const t = _hyperscript.internals.tokenizer;
554+
const tokens = t.tokenize("foo.bar more");
555+
const collected = tokens.consumeUntilWhitespace().map(tok => tok.value);
556+
const landed = tokens.currentToken().value;
557+
return {collected, landed};
558+
});
559+
// consumeUntilWhitespace stops at the space between foo.bar and more
560+
expect(results.collected).toEqual(["foo", ".", "bar"]);
561+
expect(results.landed).toBe("more");
562+
});
563+
564+
test("lastMatch returns the last consumed token", async ({evaluate}) => {
565+
const results = await evaluate(() => {
566+
const t = _hyperscript.internals.tokenizer;
567+
const tokens = t.tokenize("foo bar baz");
568+
const r = {};
569+
r.before = tokens.lastMatch() ?? null;
570+
tokens.consumeToken();
571+
r.afterFoo = tokens.lastMatch()?.value ?? null;
572+
tokens.consumeToken();
573+
r.afterBar = tokens.lastMatch()?.value ?? null;
574+
return r;
575+
});
576+
expect(results.before).toBeNull();
577+
expect(results.afterFoo).toBe("foo");
578+
expect(results.afterBar).toBe("bar");
579+
});
580+
581+
test("lastWhitespace reflects whitespace before the current token", async ({evaluate}) => {
582+
const results = await evaluate(() => {
583+
const t = _hyperscript.internals.tokenizer;
584+
const tokens = t.tokenize("foo bar\n\tbaz");
585+
const r = {};
586+
// Before any consume, no whitespace has been consumed yet
587+
r.initial = tokens.lastWhitespace();
588+
tokens.consumeToken(); // foo → consumes trailing whitespace " "
589+
r.afterFoo = tokens.lastWhitespace();
590+
tokens.consumeToken(); // bar → consumes "\n\t"
591+
r.afterBar = tokens.lastWhitespace();
592+
return r;
593+
});
594+
expect(results.initial).toBe("");
595+
expect(results.afterFoo).toBe(" ");
596+
expect(results.afterBar).toBe("\n\t");
597+
});
598+
599+
test("pushFollow/popFollow nest follow-set boundaries", async ({evaluate}) => {
600+
const results = await evaluate(() => {
601+
const t = _hyperscript.internals.tokenizer;
602+
const r = {};
603+
const tokens = t.tokenize("and or not");
604+
tokens.pushFollow("and");
605+
tokens.pushFollow("or");
606+
r.andBlocked = tokens.matchToken("and") ?? null;
607+
tokens.popFollow(); // pops "or"
608+
r.andStillBlocked = tokens.matchToken("and") ?? null;
609+
tokens.popFollow(); // pops "and"
610+
r.andAllowed = tokens.matchToken("and")?.value ?? null;
611+
return r;
612+
});
613+
expect(results.andBlocked).toBeNull();
614+
expect(results.andStillBlocked).toBeNull();
615+
expect(results.andAllowed).toBe("and");
616+
});
617+
618+
test("pushFollows/popFollows push and pop in bulk", async ({evaluate}) => {
619+
const results = await evaluate(() => {
620+
const t = _hyperscript.internals.tokenizer;
621+
const tokens = t.tokenize("and or");
622+
const count = tokens.pushFollows("and", "or");
623+
const blocked = tokens.matchToken("and") ?? null;
624+
tokens.popFollows(count);
625+
const allowed = tokens.matchToken("and")?.value ?? null;
626+
return {count, blocked, allowed};
627+
});
628+
expect(results.count).toBe(2);
629+
expect(results.blocked).toBeNull();
630+
expect(results.allowed).toBe("and");
631+
});
632+
633+
test("clearFollows/restoreFollows round-trip the follow set", async ({evaluate}) => {
634+
const results = await evaluate(() => {
635+
const t = _hyperscript.internals.tokenizer;
636+
const tokens = t.tokenize("and and and");
637+
tokens.pushFollow("and");
638+
const saved = tokens.clearFollows();
639+
const allowedWhileCleared = tokens.matchToken("and")?.value ?? null;
640+
tokens.restoreFollows(saved);
641+
const blockedAfterRestore = tokens.matchToken("and") ?? null;
642+
return {allowedWhileCleared, blockedAfterRestore};
643+
});
644+
expect(results.allowedWhileCleared).toBe("and");
645+
expect(results.blockedAfterRestore).toBeNull();
646+
});
647+
648+
});

0 commit comments

Comments
 (0)