1 | #!/usr/bin/env python2
|
2 | """
|
3 | prompt_test.py: Tests for prompt.py
|
4 | """
|
5 | from __future__ import print_function
|
6 |
|
7 | import unittest
|
8 |
|
9 | from core import state
|
10 | from core import test_lib
|
11 | from frontend import match
|
12 | from osh import prompt # module under test
|
13 |
|
14 |
|
class PromptTest(unittest.TestCase):
    """Tests for prompt.Evaluator's handling of PS1 strings."""

    def setUp(self):
        # Wire up a minimal Evaluator: fresh arena, memory, and parse context.
        test_arena = test_lib.MakeArena('<ui_test.py>')
        test_mem = state.Mem('', [], test_arena, [], {})
        ctx = test_lib.InitParseContext()
        self.p = prompt.Evaluator('osh', '0.0.0', ctx, test_mem)
        # NOTE: the word evaluator carries its own, separate 'mem' object.
        self.p.word_ev = test_lib.InitWordEvaluator()

    def testEvaluator(self):
        """Evaluating the same prompt twice gives the same result.

        Regression test for a caching bug.
        """
        first = self.p.EvalPrompt('foo')
        second = self.p.EvalPrompt('foo')
        self.assertEqual('foo', first)
        self.assertEqual('foo', second)

    def testNoEscapes(self):
        r"""Prompts without \[ or \] pass through unchanged."""
        plain_prompts = ["> ", "osh>", "[[]][[]][][]]][["]
        for ps1 in plain_prompts:
            self.assertEqual(self.p.EvalPrompt(ps1), ps1)

    def testValidEscapes(self):
        r"""Balanced \[ and \] become \x01 / \x02 marker bytes."""
        balanced_prompts = [
            "\[\033[01;34m\]user\[\033[00m\] >",
            r"\[\]\[\]\[\]",
            r"\[\] hi \[hi\] \[\] hello",
        ]
        for ps1 in balanced_prompts:
            expected = ps1.replace(r"\[", "\x01").replace(r"\]", "\x02")
            self.assertEqual(self.p.EvalPrompt(ps1), expected)

    def testInvalidEscapes(self):
        r"""Unbalanced \[ / \] produce an error placeholder string."""
        unbalanced_prompts = [
            r"\[\[",
            r"\[\]\[\]\]",
            r"\]\]",
            r"almost valid \]",
            r"\[almost valid",
            r"\]\[",  # nesting count goes negative here
        ]
        for bad in unbalanced_prompts:
            toks = match.Ps1Tokens(bad)
            self.assertEqual(r'<Error: Unbalanced \[ and \]> ',
                             self.p._ReplaceBackslashCodes(toks))
|
55 |
|
56 |
|
# Allow running this file directly; unittest discovers the test* methods above.
if __name__ == '__main__':
    unittest.main()
|