First tokenization exercise.

Chris Boesch 2023-06-26 00:54:39 +02:00
parent 371beb1658
commit ec5e15ab67
3 changed files with 175 additions and 0 deletions


@@ -0,0 +1,4 @@
139c139
< var it = std.mem.tokenize(u8, poem, ???);
---
> var it = std.mem.tokenize(u8, poem, " ,;!\n");
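The added patch file is itself an ed-style diff: "139c139" replaces line 139 of the exercise, filling in the delimiter set for std.mem.tokenize. As a minimal sketch (not part of this commit, and using placeholder text rather than the exercise's actual poem), the completed call behaves roughly like this in Zig versions of that period, where std.mem.tokenize(u8, buffer, delimiter_bytes) returns an iterator that splits on any of the delimiter bytes and skips empty tokens:

const std = @import("std");

pub fn main() void {
    // Placeholder text; the real exercise supplies its own poem.
    const poem = "Ziggy the Zig, jumps over; the lazy linker!\n";
    // Split on spaces, commas, semicolons, exclamation marks, and newlines.
    // Runs of delimiters never produce empty tokens.
    var it = std.mem.tokenize(u8, poem, " ,;!\n");
    while (it.next()) |word| {
        std.debug.print("{s}\n", .{word});
    }
}

With the "???" placeholder replaced by " ,;!\n", the iterator yields only the words of the poem, which is what the exercise asks the learner to achieve.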