First tokenization exercise.
This commit is contained in:
parent
371beb1658
commit
ec5e15ab67
3 changed files with 175 additions and 0 deletions
4
patches/patches/103_tokenization.patch
Normal file
4
patches/patches/103_tokenization.patch
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
139c139
|
||||
< var it = std.mem.tokenize(u8, poem, ???);
|
||||
---
|
||||
> var it = std.mem.tokenize(u8, poem, " ,;!\n");
|
||||
Loading…
Add table
Add a link
Reference in a new issue