context("tokenizer")

test_that("tokenizer extracts correct representation", {
  # Exercise the tokenizer on a selector containing identifiers, a child
  # combinator, an attribute selector with an escaped quote inside its
  # string, a functional pseudo-class, a comment, and a negative number.
  tokens <- tokenize('E > f[a~="y\\"x"]:nth(/* fu /]* */-3.7)')
  tokens <- unlist(lapply(tokens, function(x) x$repr()))
  # Expected token representations; positions are 1-based character
  # offsets into the selector string. The comment (positions 22-33) is
  # consumed silently and produces no token.
  # NOTE(review): this vector was reconstructed from a garbled copy of the
  # test (the surviving fragment "' at 3>" fixes the 1-based indexing) —
  # confirm the exact repr() format against the tokenizer implementation.
  expected_tokens <- c(
    "<IDENT 'E' at 1>",
    "<S ' ' at 2>",
    "<DELIM '>' at 3>",
    "<S ' ' at 4>",
    "<IDENT 'f' at 5>",
    "<DELIM '[' at 6>",
    "<IDENT 'a' at 7>",
    "<DELIM '~' at 8>",
    "<DELIM '=' at 9>",
    "<STRING 'y\"x' at 10>",
    "<DELIM ']' at 16>",
    "<DELIM ':' at 17>",
    "<IDENT 'nth' at 18>",
    "<DELIM '(' at 21>",
    "<NUMBER '-3.7' at 34>",
    "<DELIM ')' at 38>"
  )
  expect_equal(tokens, expected_tokens)
})