1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18 package org.apache.commons.lang3.text;
19
20 import static org.junit.jupiter.api.Assertions.assertEquals;
21 import static org.junit.jupiter.api.Assertions.assertFalse;
22 import static org.junit.jupiter.api.Assertions.assertNotSame;
23 import static org.junit.jupiter.api.Assertions.assertNull;
24 import static org.junit.jupiter.api.Assertions.assertThrows;
25 import static org.junit.jupiter.api.Assertions.assertTrue;
26
27 import java.util.Arrays;
28 import java.util.Collections;
29 import java.util.List;
30 import java.util.NoSuchElementException;
31
32 import org.apache.commons.lang3.AbstractLangTest;
33 import org.apache.commons.lang3.ArrayUtils;
34 import org.junit.jupiter.api.Test;
35
36
37
38
39 @Deprecated
40 public class StrTokenizerTest extends AbstractLangTest {
41
42 private static final String CSV_SIMPLE_FIXTURE = "A,b,c";
43
44 private static final String TSV_SIMPLE_FIXTURE = "A\tb\tc";
45
46 private void checkClone(final StrTokenizer tokenizer) {
47 assertNotSame(StrTokenizer.getCSVInstance(), tokenizer);
48 assertNotSame(StrTokenizer.getTSVInstance(), tokenizer);
49 }
50
51 @Test
52 public void test1() {
53
54 final String input = "a;b;c;\"d;\"\"e\";f; ; ; ";
55 final StrTokenizer tok = new StrTokenizer(input);
56 tok.setDelimiterChar(';');
57 tok.setQuoteChar('"');
58 tok.setIgnoredMatcher(StrMatcher.trimMatcher());
59 tok.setIgnoreEmptyTokens(false);
60 final String[] tokens = tok.getTokenArray();
61
62 final String[] expected = {"a", "b", "c", "d;\"e", "f", "", "", ""};
63
64 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
65 for (int i = 0; i < expected.length; i++) {
66 assertEquals(expected[i], tokens[i],
67 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
68 }
69
70 }
71
72 @Test
73 public void test2() {
74
75 final String input = "a;b;c ;\"d;\"\"e\";f; ; ;";
76 final StrTokenizer tok = new StrTokenizer(input);
77 tok.setDelimiterChar(';');
78 tok.setQuoteChar('"');
79 tok.setIgnoredMatcher(StrMatcher.noneMatcher());
80 tok.setIgnoreEmptyTokens(false);
81 final String[] tokens = tok.getTokenArray();
82
83 final String[] expected = {"a", "b", "c ", "d;\"e", "f", " ", " ", ""};
84
85 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
86 for (int i = 0; i < expected.length; i++) {
87 assertEquals(expected[i], tokens[i],
88 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
89 }
90
91 }
92
93 @Test
94 public void test3() {
95
96 final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
97 final StrTokenizer tok = new StrTokenizer(input);
98 tok.setDelimiterChar(';');
99 tok.setQuoteChar('"');
100 tok.setIgnoredMatcher(StrMatcher.noneMatcher());
101 tok.setIgnoreEmptyTokens(false);
102 final String[] tokens = tok.getTokenArray();
103
104 final String[] expected = {"a", "b", " c", "d;\"e", "f", " ", " ", ""};
105
106 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
107 for (int i = 0; i < expected.length; i++) {
108 assertEquals(expected[i], tokens[i],
109 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
110 }
111
112 }
113
114 @Test
115 public void test4() {
116
117 final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
118 final StrTokenizer tok = new StrTokenizer(input);
119 tok.setDelimiterChar(';');
120 tok.setQuoteChar('"');
121 tok.setIgnoredMatcher(StrMatcher.trimMatcher());
122 tok.setIgnoreEmptyTokens(true);
123 final String[] tokens = tok.getTokenArray();
124
125 final String[] expected = {"a", "b", "c", "d;\"e", "f"};
126
127 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
128 for (int i = 0; i < expected.length; i++) {
129 assertEquals(expected[i], tokens[i],
130 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
131 }
132
133 }
134
135 @Test
136 public void test5() {
137
138 final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
139 final StrTokenizer tok = new StrTokenizer(input);
140 tok.setDelimiterChar(';');
141 tok.setQuoteChar('"');
142 tok.setIgnoredMatcher(StrMatcher.trimMatcher());
143 tok.setIgnoreEmptyTokens(false);
144 tok.setEmptyTokenAsNull(true);
145 final String[] tokens = tok.getTokenArray();
146
147 final String[] expected = {"a", "b", "c", "d;\"e", "f", null, null, null};
148
149 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
150 for (int i = 0; i < expected.length; i++) {
151 assertEquals(expected[i], tokens[i],
152 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
153 }
154
155 }
156
157 @Test
158 public void test6() {
159
160 final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
161 final StrTokenizer tok = new StrTokenizer(input);
162 tok.setDelimiterChar(';');
163 tok.setQuoteChar('"');
164 tok.setIgnoredMatcher(StrMatcher.trimMatcher());
165 tok.setIgnoreEmptyTokens(false);
166
167 final String[] tokens = tok.getTokenArray();
168
169 final String[] expected = {"a", "b", " c", "d;\"e", "f", null, null, null};
170
171 int nextCount = 0;
172 while (tok.hasNext()) {
173 tok.next();
174 nextCount++;
175 }
176
177 int prevCount = 0;
178 while (tok.hasPrevious()) {
179 tok.previous();
180 prevCount++;
181 }
182
183 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
184
185 assertEquals(nextCount, expected.length, "could not cycle through entire token list" + " using the 'hasNext' and 'next' methods");
186
187 assertEquals(prevCount, expected.length, "could not cycle through entire token list" + " using the 'hasPrevious' and 'previous' methods");
188
189 }
190
191 @Test
192 public void test7() {
193
194 final String input = "a b c \"d e\" f ";
195 final StrTokenizer tok = new StrTokenizer(input);
196 tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
197 tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
198 tok.setIgnoredMatcher(StrMatcher.noneMatcher());
199 tok.setIgnoreEmptyTokens(false);
200 final String[] tokens = tok.getTokenArray();
201
202 final String[] expected = {"a", "", "", "b", "c", "d e", "f", ""};
203
204 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
205 for (int i = 0; i < expected.length; i++) {
206 assertEquals(expected[i], tokens[i],
207 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
208 }
209
210 }
211
212 @Test
213 public void test8() {
214
215 final String input = "a b c \"d e\" f ";
216 final StrTokenizer tok = new StrTokenizer(input);
217 tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
218 tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
219 tok.setIgnoredMatcher(StrMatcher.noneMatcher());
220 tok.setIgnoreEmptyTokens(true);
221 final String[] tokens = tok.getTokenArray();
222
223 final String[] expected = {"a", "b", "c", "d e", "f"};
224
225 assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
226 for (int i = 0; i < expected.length; i++) {
227 assertEquals(expected[i], tokens[i],
228 "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
229 }
230
231 }
232
233 @Test
234 public void testBasic1() {
235 final String input = "a b c";
236 final StrTokenizer tok = new StrTokenizer(input);
237 assertEquals("a", tok.next());
238 assertEquals("b", tok.next());
239 assertEquals("c", tok.next());
240 assertFalse(tok.hasNext());
241 }
242
243 @Test
244 public void testBasic2() {
245 final String input = "a \nb\fc";
246 final StrTokenizer tok = new StrTokenizer(input);
247 assertEquals("a", tok.next());
248 assertEquals("b", tok.next());
249 assertEquals("c", tok.next());
250 assertFalse(tok.hasNext());
251 }
252
253 @Test
254 public void testBasic3() {
255 final String input = "a \nb\u0001\fc";
256 final StrTokenizer tok = new StrTokenizer(input);
257 assertEquals("a", tok.next());
258 assertEquals("b\u0001", tok.next());
259 assertEquals("c", tok.next());
260 assertFalse(tok.hasNext());
261 }
262
263 @Test
264 public void testBasic4() {
265 final String input = "a \"b\" c";
266 final StrTokenizer tok = new StrTokenizer(input);
267 assertEquals("a", tok.next());
268 assertEquals("\"b\"", tok.next());
269 assertEquals("c", tok.next());
270 assertFalse(tok.hasNext());
271 }
272
273 @Test
274 public void testBasic5() {
275 final String input = "a:b':c";
276 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
277 assertEquals("a", tok.next());
278 assertEquals("b'", tok.next());
279 assertEquals("c", tok.next());
280 assertFalse(tok.hasNext());
281 }
282
283 @Test
284 public void testBasicDelim1() {
285 final String input = "a:b:c";
286 final StrTokenizer tok = new StrTokenizer(input, ':');
287 assertEquals("a", tok.next());
288 assertEquals("b", tok.next());
289 assertEquals("c", tok.next());
290 assertFalse(tok.hasNext());
291 }
292
293 @Test
294 public void testBasicDelim2() {
295 final String input = "a:b:c";
296 final StrTokenizer tok = new StrTokenizer(input, ',');
297 assertEquals("a:b:c", tok.next());
298 assertFalse(tok.hasNext());
299 }
300
301 @Test
302 public void testBasicEmpty1() {
303 final String input = "a b c";
304 final StrTokenizer tok = new StrTokenizer(input);
305 tok.setIgnoreEmptyTokens(false);
306 assertEquals("a", tok.next());
307 assertEquals("", tok.next());
308 assertEquals("b", tok.next());
309 assertEquals("c", tok.next());
310 assertFalse(tok.hasNext());
311 }
312
313 @Test
314 public void testBasicEmpty2() {
315 final String input = "a b c";
316 final StrTokenizer tok = new StrTokenizer(input);
317 tok.setIgnoreEmptyTokens(false);
318 tok.setEmptyTokenAsNull(true);
319 assertEquals("a", tok.next());
320 assertNull(tok.next());
321 assertEquals("b", tok.next());
322 assertEquals("c", tok.next());
323 assertFalse(tok.hasNext());
324 }
325
326 @Test
327 public void testBasicIgnoreTrimmed1() {
328 final String input = "a: bIGNOREc : ";
329 final StrTokenizer tok = new StrTokenizer(input, ':');
330 tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
331 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
332 tok.setIgnoreEmptyTokens(false);
333 tok.setEmptyTokenAsNull(true);
334 assertEquals("a", tok.next());
335 assertEquals("bc", tok.next());
336 assertNull(tok.next());
337 assertFalse(tok.hasNext());
338 }
339
340 @Test
341 public void testBasicIgnoreTrimmed2() {
342 final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
343 final StrTokenizer tok = new StrTokenizer(input, ':');
344 tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
345 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
346 tok.setIgnoreEmptyTokens(false);
347 tok.setEmptyTokenAsNull(true);
348 assertEquals("a", tok.next());
349 assertEquals("bc", tok.next());
350 assertNull(tok.next());
351 assertFalse(tok.hasNext());
352 }
353
354 @Test
355 public void testBasicIgnoreTrimmed3() {
356 final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
357 final StrTokenizer tok = new StrTokenizer(input, ':');
358 tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
359 tok.setIgnoreEmptyTokens(false);
360 tok.setEmptyTokenAsNull(true);
361 assertEquals("a", tok.next());
362 assertEquals(" bc ", tok.next());
363 assertEquals(" ", tok.next());
364 assertFalse(tok.hasNext());
365 }
366
367 @Test
368 public void testBasicIgnoreTrimmed4() {
369 final String input = "IGNOREaIGNORE: IGNORE 'bIGNOREc'IGNORE'd' IGNORE : IGNORE ";
370 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
371 tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
372 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
373 tok.setIgnoreEmptyTokens(false);
374 tok.setEmptyTokenAsNull(true);
375 assertEquals("a", tok.next());
376 assertEquals("bIGNOREcd", tok.next());
377 assertNull(tok.next());
378 assertFalse(tok.hasNext());
379 }
380
381 @Test
382 public void testBasicQuoted1() {
383 final String input = "a 'b' c";
384 final StrTokenizer tok = new StrTokenizer(input, ' ', '\'');
385 assertEquals("a", tok.next());
386 assertEquals("b", tok.next());
387 assertEquals("c", tok.next());
388 assertFalse(tok.hasNext());
389 }
390
391 @Test
392 public void testBasicQuoted2() {
393 final String input = "a:'b':";
394 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
395 tok.setIgnoreEmptyTokens(false);
396 tok.setEmptyTokenAsNull(true);
397 assertEquals("a", tok.next());
398 assertEquals("b", tok.next());
399 assertNull(tok.next());
400 assertFalse(tok.hasNext());
401 }
402
403 @Test
404 public void testBasicQuoted3() {
405 final String input = "a:'b''c'";
406 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
407 tok.setIgnoreEmptyTokens(false);
408 tok.setEmptyTokenAsNull(true);
409 assertEquals("a", tok.next());
410 assertEquals("b'c", tok.next());
411 assertFalse(tok.hasNext());
412 }
413
414 @Test
415 public void testBasicQuoted4() {
416 final String input = "a: 'b' 'c' :d";
417 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
418 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
419 tok.setIgnoreEmptyTokens(false);
420 tok.setEmptyTokenAsNull(true);
421 assertEquals("a", tok.next());
422 assertEquals("b c", tok.next());
423 assertEquals("d", tok.next());
424 assertFalse(tok.hasNext());
425 }
426
427 @Test
428 public void testBasicQuoted5() {
429 final String input = "a: 'b'x'c' :d";
430 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
431 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
432 tok.setIgnoreEmptyTokens(false);
433 tok.setEmptyTokenAsNull(true);
434 assertEquals("a", tok.next());
435 assertEquals("bxc", tok.next());
436 assertEquals("d", tok.next());
437 assertFalse(tok.hasNext());
438 }
439
440 @Test
441 public void testBasicQuoted6() {
442 final String input = "a:'b'\"c':d";
443 final StrTokenizer tok = new StrTokenizer(input, ':');
444 tok.setQuoteMatcher(StrMatcher.quoteMatcher());
445 assertEquals("a", tok.next());
446 assertEquals("b\"c:d", tok.next());
447 assertFalse(tok.hasNext());
448 }
449
450 @Test
451 public void testBasicQuoted7() {
452 final String input = "a:\"There's a reason here\":b";
453 final StrTokenizer tok = new StrTokenizer(input, ':');
454 tok.setQuoteMatcher(StrMatcher.quoteMatcher());
455 assertEquals("a", tok.next());
456 assertEquals("There's a reason here", tok.next());
457 assertEquals("b", tok.next());
458 assertFalse(tok.hasNext());
459 }
460
461 @Test
462 public void testBasicQuotedTrimmed1() {
463 final String input = "a: 'b' :";
464 final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
465 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
466 tok.setIgnoreEmptyTokens(false);
467 tok.setEmptyTokenAsNull(true);
468 assertEquals("a", tok.next());
469 assertEquals("b", tok.next());
470 assertNull(tok.next());
471 assertFalse(tok.hasNext());
472 }
473
474 @Test
475 public void testBasicTrimmed1() {
476 final String input = "a: b : ";
477 final StrTokenizer tok = new StrTokenizer(input, ':');
478 tok.setTrimmerMatcher(StrMatcher.trimMatcher());
479 tok.setIgnoreEmptyTokens(false);
480 tok.setEmptyTokenAsNull(true);
481 assertEquals("a", tok.next());
482 assertEquals("b", tok.next());
483 assertNull(tok.next());
484 assertFalse(tok.hasNext());
485 }
486
487 @Test
488 public void testBasicTrimmed2() {
489 final String input = "a: b :";
490 final StrTokenizer tok = new StrTokenizer(input, ':');
491 tok.setTrimmerMatcher(StrMatcher.stringMatcher(" "));
492 tok.setIgnoreEmptyTokens(false);
493 tok.setEmptyTokenAsNull(true);
494 assertEquals("a", tok.next());
495 assertEquals("b", tok.next());
496 assertNull(tok.next());
497 assertFalse(tok.hasNext());
498 }
499
    @Test
    public void testChaining() {
        // Every fluent mutator must return the tokenizer itself so calls can
        // be chained.  assertEquals works here because StrTokenizer does not
        // override equals(), so Object reference equality applies.
        final StrTokenizer tok = new StrTokenizer();
        assertEquals(tok, tok.reset());
        assertEquals(tok, tok.reset(""));
        assertEquals(tok, tok.reset(new char[0]));
        assertEquals(tok, tok.setDelimiterChar(' '));
        assertEquals(tok, tok.setDelimiterString(" "));
        assertEquals(tok, tok.setDelimiterMatcher(null));
        assertEquals(tok, tok.setQuoteChar(' '));
        assertEquals(tok, tok.setQuoteMatcher(null));
        assertEquals(tok, tok.setIgnoredChar(' '));
        assertEquals(tok, tok.setIgnoredMatcher(null));
        assertEquals(tok, tok.setTrimmerMatcher(null));
        assertEquals(tok, tok.setEmptyTokenAsNull(false));
        assertEquals(tok, tok.setIgnoreEmptyTokens(false));
    }
517
518
519
520
521
522 @Test
523 public void testCloneNotSupportedException() {
524 final Object notCloned = new StrTokenizer() {
525 @Override
526 Object cloneReset() throws CloneNotSupportedException {
527 throw new CloneNotSupportedException("test");
528 }
529 }.clone();
530 assertNull(notCloned);
531 }
532
533 @Test
534 public void testCloneNull() {
535 final StrTokenizer tokenizer = new StrTokenizer((char[]) null);
536
537 assertNull(tokenizer.nextToken());
538 tokenizer.reset();
539 assertNull(tokenizer.nextToken());
540
541 final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
542 tokenizer.reset();
543 assertNull(tokenizer.nextToken());
544 assertNull(clonedTokenizer.nextToken());
545 }
546
547 @Test
548 public void testCloneReset() {
549 final char[] input = {'a'};
550 final StrTokenizer tokenizer = new StrTokenizer(input);
551
552 assertEquals("a", tokenizer.nextToken());
553 tokenizer.reset(input);
554 assertEquals("a", tokenizer.nextToken());
555
556 final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
557 input[0] = 'b';
558 tokenizer.reset(input);
559 assertEquals("b", tokenizer.nextToken());
560 assertEquals("a", clonedTokenizer.nextToken());
561 }
562
563 @Test
564 public void testConstructor_charArray() {
565 StrTokenizer tok = new StrTokenizer("a b".toCharArray());
566 assertEquals("a", tok.next());
567 assertEquals("b", tok.next());
568 assertFalse(tok.hasNext());
569
570 tok = new StrTokenizer(new char[0]);
571 assertFalse(tok.hasNext());
572
573 tok = new StrTokenizer((char[]) null);
574 assertFalse(tok.hasNext());
575 }
576
577 @Test
578 public void testConstructor_charArray_char() {
579 StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ');
580 assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
581 assertEquals("a", tok.next());
582 assertEquals("b", tok.next());
583 assertFalse(tok.hasNext());
584
585 tok = new StrTokenizer(new char[0], ' ');
586 assertFalse(tok.hasNext());
587
588 tok = new StrTokenizer((char[]) null, ' ');
589 assertFalse(tok.hasNext());
590 }
591
592 @Test
593 public void testConstructor_charArray_char_char() {
594 StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ', '"');
595 assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
596 assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
597 assertEquals("a", tok.next());
598 assertEquals("b", tok.next());
599 assertFalse(tok.hasNext());
600
601 tok = new StrTokenizer(new char[0], ' ', '"');
602 assertFalse(tok.hasNext());
603
604 tok = new StrTokenizer((char[]) null, ' ', '"');
605 assertFalse(tok.hasNext());
606 }
607
608 @Test
609 public void testConstructor_String() {
610 StrTokenizer tok = new StrTokenizer("a b");
611 assertEquals("a", tok.next());
612 assertEquals("b", tok.next());
613 assertFalse(tok.hasNext());
614
615 tok = new StrTokenizer("");
616 assertFalse(tok.hasNext());
617
618 tok = new StrTokenizer((String) null);
619 assertFalse(tok.hasNext());
620 }
621
622 @Test
623 public void testConstructor_String_char() {
624 StrTokenizer tok = new StrTokenizer("a b", ' ');
625 assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
626 assertEquals("a", tok.next());
627 assertEquals("b", tok.next());
628 assertFalse(tok.hasNext());
629
630 tok = new StrTokenizer("", ' ');
631 assertFalse(tok.hasNext());
632
633 tok = new StrTokenizer((String) null, ' ');
634 assertFalse(tok.hasNext());
635 }
636
637 @Test
638 public void testConstructor_String_char_char() {
639 StrTokenizer tok = new StrTokenizer("a b", ' ', '"');
640 assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
641 assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
642 assertEquals("a", tok.next());
643 assertEquals("b", tok.next());
644 assertFalse(tok.hasNext());
645
646 tok = new StrTokenizer("", ' ', '"');
647 assertFalse(tok.hasNext());
648
649 tok = new StrTokenizer((String) null, ' ', '"');
650 assertFalse(tok.hasNext());
651 }
652
653 private void testCSV(final String data) {
654 this.testXSVAbc(StrTokenizer.getCSVInstance(data));
655 this.testXSVAbc(StrTokenizer.getCSVInstance(data.toCharArray()));
656 }
657
658 @Test
659 public void testCSVEmpty() {
660 this.testEmpty(StrTokenizer.getCSVInstance());
661 this.testEmpty(StrTokenizer.getCSVInstance(""));
662 }
663
664 @Test
665 public void testCSVSimple() {
666 this.testCSV(CSV_SIMPLE_FIXTURE);
667 }
668
669 @Test
670 public void testCSVSimpleNeedsTrim() {
671 this.testCSV(" " + CSV_SIMPLE_FIXTURE);
672 this.testCSV(" \n\t " + CSV_SIMPLE_FIXTURE);
673 this.testCSV(" \n " + CSV_SIMPLE_FIXTURE + "\n\n\r");
674 }
675
676 void testEmpty(final StrTokenizer tokenizer) {
677 this.checkClone(tokenizer);
678 assertFalse(tokenizer.hasNext());
679 assertFalse(tokenizer.hasPrevious());
680 assertNull(tokenizer.nextToken());
681 assertEquals(0, tokenizer.size());
682 assertThrows(NoSuchElementException.class, tokenizer::next);
683 }
684
685 @Test
686 public void testGetContent() {
687 final String input = "a b c \"d e\" f ";
688 StrTokenizer tok = new StrTokenizer(input);
689 assertEquals(input, tok.getContent());
690
691 tok = new StrTokenizer(input.toCharArray());
692 assertEquals(input, tok.getContent());
693
694 tok = new StrTokenizer();
695 assertNull(tok.getContent());
696 }
697
698 @Test
699 public void testIteration() {
700 final StrTokenizer tkn = new StrTokenizer("a b c");
701 assertFalse(tkn.hasPrevious());
702 assertThrows(NoSuchElementException.class, tkn::previous);
703 assertTrue(tkn.hasNext());
704
705 assertEquals("a", tkn.next());
706 assertThrows(UnsupportedOperationException.class, tkn::remove);
707 assertThrows(UnsupportedOperationException.class, () -> tkn.set("x"));
708 assertThrows(UnsupportedOperationException.class, () -> tkn.add("y"));
709 assertTrue(tkn.hasPrevious());
710 assertTrue(tkn.hasNext());
711
712 assertEquals("b", tkn.next());
713 assertTrue(tkn.hasPrevious());
714 assertTrue(tkn.hasNext());
715
716 assertEquals("c", tkn.next());
717 assertTrue(tkn.hasPrevious());
718 assertFalse(tkn.hasNext());
719
720 assertThrows(NoSuchElementException.class, tkn::next);
721 assertTrue(tkn.hasPrevious());
722 assertFalse(tkn.hasNext());
723 }
724
725 @Test
726 public void testListArray() {
727 final String input = "a b c";
728 final StrTokenizer tok = new StrTokenizer(input);
729 final String[] array = tok.getTokenArray();
730 final List<?> list = tok.getTokenList();
731
732 assertEquals(Arrays.asList(array), list);
733 assertEquals(3, list.size());
734 }
735
736 @Test
737 public void testReset() {
738 final StrTokenizer tok = new StrTokenizer("a b c");
739 assertEquals("a", tok.next());
740 assertEquals("b", tok.next());
741 assertEquals("c", tok.next());
742 assertFalse(tok.hasNext());
743
744 tok.reset();
745 assertEquals("a", tok.next());
746 assertEquals("b", tok.next());
747 assertEquals("c", tok.next());
748 assertFalse(tok.hasNext());
749 }
750
751 @Test
752 public void testReset_charArray() {
753 final StrTokenizer tok = new StrTokenizer("x x x");
754
755 final char[] array = {'a', 'b', 'c'};
756 tok.reset(array);
757 assertEquals("abc", tok.next());
758 assertFalse(tok.hasNext());
759
760 tok.reset((char[]) null);
761 assertFalse(tok.hasNext());
762 }
763
764 @Test
765 public void testReset_String() {
766 final StrTokenizer tok = new StrTokenizer("x x x");
767 tok.reset("d e");
768 assertEquals("d", tok.next());
769 assertEquals("e", tok.next());
770 assertFalse(tok.hasNext());
771
772 tok.reset((String) null);
773 assertFalse(tok.hasNext());
774 }
775
776 @Test
777 public void testTokenizeSubclassInputChange() {
778 final StrTokenizer tkn = new StrTokenizer("a b c d e") {
779 @Override
780 protected List<String> tokenize(final char[] chars, final int offset, final int count) {
781 return super.tokenize("w x y z".toCharArray(), 2, 5);
782 }
783 };
784 assertEquals("x", tkn.next());
785 assertEquals("y", tkn.next());
786 }
787
788 @Test
789 public void testTokenizeSubclassOutputChange() {
790 final StrTokenizer tkn = new StrTokenizer("a b c") {
791 @Override
792 protected List<String> tokenize(final char[] chars, final int offset, final int count) {
793 final List<String> list = super.tokenize(chars, offset, count);
794 Collections.reverse(list);
795 return list;
796 }
797 };
798 assertEquals("c", tkn.next());
799 assertEquals("b", tkn.next());
800 assertEquals("a", tkn.next());
801 }
802
803 @Test
804 public void testToString() {
805 final StrTokenizer tkn = new StrTokenizer("a b c d e");
806 assertEquals("StrTokenizer[not tokenized yet]", tkn.toString());
807 tkn.next();
808 assertEquals("StrTokenizer[a, b, c, d, e]", tkn.toString());
809 }
810
811 @Test
812 public void testTSV() {
813 this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE));
814 this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE.toCharArray()));
815 }
816
817 @Test
818 public void testTSVEmpty() {
819 this.testEmpty(StrTokenizer.getTSVInstance());
820 this.testEmpty(StrTokenizer.getTSVInstance(""));
821 }
822
    /**
     * Shared assertions for a tokenizer expected to yield exactly
     * "A", "b", "c": walks forward to the end, then backward to the start,
     * checking the cursor indices at every step.  The statement order is
     * significant — each call advances or rewinds the tokenizer.
     */
    void testXSVAbc(final StrTokenizer tokenizer) {
        this.checkClone(tokenizer);
        // Initial cursor position: before the first token.
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(0, tokenizer.nextIndex());
        assertNull(tokenizer.previousToken());
        // Forward pass.
        assertEquals("A", tokenizer.nextToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("b", tokenizer.nextToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("c", tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        // Past the end: nextToken() returns null, cursor stays put.
        assertNull(tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        // Backward pass.
        assertEquals("c", tokenizer.previousToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("b", tokenizer.previousToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("A", tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        // Before the start: previousToken() returns null, cursor stays put.
        assertNull(tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(3, tokenizer.size());
    }
847
848 }