[jsinterp] Add handling of line terminators

- adds `jsgrammar.LINETERMINATORSEQ_RE`
- lexer `tstream.TokenStream` checks for lineterminators in tokens
- adds `tstream.Token`
- refactors `tstream.TokenStream` and `jsparser.Parser` to use it
This commit is contained in:
sulyi 2018-06-10 22:27:22 +02:00
parent a8c640e1b5
commit a33b47e485
31 changed files with 921 additions and 888 deletions

View File

@ -1,79 +1,79 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS
tests = [ tests = [
{'code': 'function f() { var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2] = 7; return x; }', {'code': 'function f() { var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2] = 7; return x; }',
'asserts': [{'value': [5, 2, 7], 'call': ('f',)}], 'asserts': [{'value': [5, 2, 7], 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, (TokenTypes.VAR,
zip(['x'], zip(['x'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ARRAY, [ (TokenTypes.MEMBER, (TokenTypes.ARRAY, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 2), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 3), None, None)]), None) (TokenTypes.MEMBER, (TokenTypes.INT, 3), None, None)]), None)
]), None, None), ]), None, None),
]), ]),
None) None)
]) ])
), ),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'),
None, None,
(Token.ELEM, (TokenTypes.ELEM,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]),
None) None)
]), ]),
None)) None))
]), ]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 4), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 4), None, None)]), None)
) )
]), ]),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'),
None, None,
(Token.ELEM, (Token.EXPR, [ (TokenTypes.ELEM, (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]),
None) None)
]), None)) ]), None))
]), ]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 5), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 5), None, None)]), None))
]), ]),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'),
None, None,
(Token.ELEM, (Token.EXPR, [ (TokenTypes.ELEM, (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 2), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)]),
None) None)
]), None)) ]), None))
]), ]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 7), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 7), None, None)]), None))
]), ]),
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]), None)
]) ])
) )
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS, _ASSIGN_OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS, _ASSIGN_OPERATORS
tests = [ tests = [
@ -8,30 +8,30 @@ tests = [
'code': 'function f() { var x = 20; x = 30 + 1; return x; }', 'code': 'function f() { var x = 20; x = 30 + 1; return x; }',
'asserts': [{'value': 31, 'call': ('f',)}], 'asserts': [{'value': 31, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, zip( (TokenTypes.VAR, zip(
['x'], ['x'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 20), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 20), None, None)]),
None)] None)]
)), )),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]),
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 30), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 30), None, None),
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.OP, _OPERATORS['+'][1])]), (TokenTypes.OP, _OPERATORS['+'][1])]),
None)) None))
]), ]),
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None) (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)
]), None) ]), None)
])) ]))
]) ])

View File

@ -1,18 +1,18 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
tests = [ tests = [
{ {
'code': 'function f() { return 42; }', 'code': 'function f() { return 42; }',
'asserts': [{'value': 42, 'call': ('f',)}], 'asserts': [{'value': 42, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 42), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 42), None, None)]),
None) None)
])) ]))
]) ])
@ -21,7 +21,7 @@ tests = [
{ {
'code': 'function x() {;}', 'code': 'function x() {;}',
'asserts': [{'value': None, 'call': ('x',)}], 'asserts': [{'value': None, 'call': ('x',)}],
'ast': [(Token.FUNC, 'x', [], [None])] 'ast': [(TokenTypes.FUNC, 'x', [], [None])]
}, },
{ {
# FIXME: function expression needs to be implemented # FIXME: function expression needs to be implemented

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _RELATIONS from youtube_dl.jsinterp2.tstream import _RELATIONS
skip = { skip = {
@ -20,17 +20,17 @@ tests = [
''', ''',
'asserts': [{'value': True, 'call': ('a', 1)}, {'value': False, 'call': ('a', 0)}], 'asserts': [{'value': True, 'call': ('a', 1)}, {'value': False, 'call': ('a', 0)}],
'ast': [ 'ast': [
(Token.FUNC, 'a', ['x'], [ (TokenTypes.FUNC, 'a', ['x'], [
(Token.IF, (TokenTypes.IF,
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.MEMBER, (Token.INT, 0), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None),
(Token.REL, _RELATIONS['>'][1]) (TokenTypes.REL, _RELATIONS['>'][1])
]), None)]), ]), None)]),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.BOOL, True), None, None)]), None)])), (TokenTypes.MEMBER, (TokenTypes.BOOL, True), None, None)]), None)])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.BOOL, False), None, None)]), None)]))) (TokenTypes.MEMBER, (TokenTypes.BOOL, False), None, None)]), None)])))
]) ])
] ]
} }

View File

@ -1,24 +1,24 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
tests = [ tests = [
{'code': 'function x4(a){return 2*a+1;}', {'code': 'function x4(a){return 2*a+1;}',
'asserts': [{'value': 7, 'call': ('x4', 3)}], 'asserts': [{'value': 7, 'call': ('x4', 3)}],
'ast': [ 'ast': [
(Token.FUNC, 'x4', ['a'], [ (TokenTypes.FUNC, 'x4', ['a'], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
# Reverse Polish Notation! # Reverse Polish Notation!
(Token.MEMBER, (Token.INT, 2), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None),
(Token.MEMBER, (Token.ID, 'a'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None),
(Token.OP, _OPERATORS['*'][1]), (TokenTypes.OP, _OPERATORS['*'][1]),
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), ]),
None) None)
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
tests = [ tests = [
@ -12,26 +12,26 @@ tests = [
''', ''',
'asserts': [{'value': 5, 'call': ('z',)}], 'asserts': [{'value': 5, 'call': ('z',)}],
'ast': [ 'ast': [
(Token.FUNC, 'x', [], [ (TokenTypes.FUNC, 'x', [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 2), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)]), None)
])) ]))
]), ]),
(Token.FUNC, 'y', ['a'], [ (TokenTypes.FUNC, 'y', ['a'], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, (Token.CALL, [], None)), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, (TokenTypes.CALL, [], None)),
(Token.MEMBER, (Token.ID, 'a'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None) ]), None)
])) ]))
]), ]),
(Token.FUNC, 'z', [], [ (TokenTypes.FUNC, 'z', [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'y'), None, (Token.CALL, [ (TokenTypes.MEMBER, (TokenTypes.ID, 'y'), None, (TokenTypes.CALL, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 3), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 3), None, None)]), None)
], None)) ], None))
]), None) ]), None)
]) ])
@ -42,13 +42,13 @@ tests = [
'code': 'function x(a) { return a.split(""); }', 'code': 'function x(a) { return a.split(""); }',
'asserts': [{'value': ["a", "b", "c"], 'call': ('x', "abc")}], 'asserts': [{'value': ["a", "b", "c"], 'call': ('x', "abc")}],
'ast': [ 'ast': [
(Token.FUNC, 'x', ['a'], [ (TokenTypes.FUNC, 'x', ['a'], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), None, (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None,
(Token.FIELD, 'split', (TokenTypes.FIELD, 'split',
(Token.CALL, [ (TokenTypes.CALL, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.STR, ''), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.STR, ''), None, None)]), None)
], None)) ], None))
)]), )]),
None) None)
@ -64,32 +64,32 @@ tests = [
''', ''',
'asserts': [{'value': 0, 'call': ('c',)}], 'asserts': [{'value': 0, 'call': ('c',)}],
'ast': [ 'ast': [
(Token.FUNC, 'a', ['x'], [ (TokenTypes.FUNC, 'a', ['x'], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]), None)
])) ]))
]), ]),
(Token.FUNC, 'b', ['x'], [ (TokenTypes.FUNC, 'b', ['x'], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None) ]), None)
])) ]))
]), ]),
(Token.FUNC, 'c', [], [ (TokenTypes.FUNC, 'c', [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ARRAY, [ (TokenTypes.MEMBER, (TokenTypes.ARRAY, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'b'), None, None)]), None) (TokenTypes.MEMBER, (TokenTypes.ID, 'b'), None, None)]), None)
]), None, (Token.ELEM, (Token.EXPR, [ ]), None, (TokenTypes.ELEM, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]), None)
]), (Token.CALL, [ ]), (TokenTypes.CALL, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]), None)
], None))) ], None)))
]), None) ]), None)
])) ]))

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
skip = {'jsinterp': 'Not yet fully implemented'} skip = {'jsinterp': 'Not yet fully implemented'}
@ -17,27 +17,27 @@ tests = [
''', ''',
'asserts': [{'value': 52, 'call': ('x',)}], 'asserts': [{'value': 52, 'call': ('x',)}],
'ast': [ 'ast': [
(Token.FUNC, 'x', [], [ (TokenTypes.FUNC, 'x', [], [
(Token.VAR, zip( (TokenTypes.VAR, zip(
['x'], ['x'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 2), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)]),
None)] None)]
)), )),
(Token.VAR, zip( (TokenTypes.VAR, zip(
['y'], ['y'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 50), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 50), None, None)]),
None)] None)]
)), )),
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.MEMBER, (Token.ID, 'y'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'y'), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None) ]), None)
])) ]))
]) ])
@ -52,28 +52,28 @@ tests = [
''', ''',
'asserts': [{'value': 3, 'call': ('f',)}], 'asserts': [{'value': 3, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, zip( (TokenTypes.VAR, zip(
['x'], ['x'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.STR, '/*'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.STR, '/*'), None, None)]),
None)] None)]
)), )),
(Token.VAR, zip( (TokenTypes.VAR, zip(
['y'], ['y'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.MEMBER, (Token.INT, 2), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), ]),
None)] None)]
)), )),
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'y'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'y'), None, None)]),
None) None)
])) ]))
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
skip = { skip = {
'jsinterp': 'Debugger statement is not supported', 'jsinterp': 'Debugger statement is not supported',

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS
skip = { skip = {
@ -21,30 +21,30 @@ tests = [
''', ''',
'asserts': [{'value': 5, 'call': ('f', 5)}], 'asserts': [{'value': 5, 'call': ('f', 5)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', ['x'], [ (TokenTypes.FUNC, 'f', ['x'], [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['='][1], (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'i'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 1), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None)]), None))
]), ]),
(Token.DO, (TokenTypes.DO,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None),
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.REL, _RELATIONS['<'][1]) (TokenTypes.REL, _RELATIONS['<'][1])
]), None) ]), None)
]), ]),
(Token.BLOCK, [ (TokenTypes.BLOCK, [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None),
(Token.POSTFIX, _UNARY_OPERATORS['++'][1]) (TokenTypes.POSTFIX, _UNARY_OPERATORS['++'][1])
]), None) ]), None)
]) ])
])), ])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,21 +1,21 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
tests = [ tests = [
{'code': 'function f() { return; y(); }', {'code': 'function f() { return; y(); }',
'asserts': [{'value': None, 'call': ('f',)}], 'asserts': [{'value': None, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, None), (TokenTypes.RETURN, None),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (TokenTypes.MEMBER,
(Token.ID, 'y'), (TokenTypes.ID, 'y'),
None, None,
(Token.CALL, [], None) (TokenTypes.CALL, [], None)
) )
]), ]),
None) None)

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS
skip = { skip = {
@ -21,30 +21,30 @@ tests = [
''', ''',
'asserts': [{'value': 5, 'call': ('f', 5)}], 'asserts': [{'value': 5, 'call': ('f', 5)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', ['x'], [ (TokenTypes.FUNC, 'f', ['x'], [
(Token.VAR, zip(['h'], [ (TokenTypes.VAR, zip(['h'], [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]), None)
])), ])),
(Token.FOR, (TokenTypes.FOR,
None, None,
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'h'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None),
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.REL, _RELATIONS['<='][1]) (TokenTypes.REL, _RELATIONS['<='][1])
]), None)]), ]), None)]),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'h'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None),
(Token.PREFIX, _UNARY_OPERATORS['++'][1]) (TokenTypes.PREFIX, _UNARY_OPERATORS['++'][1])
]), None)]), ]), None)]),
(Token.BLOCK, [ (TokenTypes.BLOCK, [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['='][1], (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'a'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'h'), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None)]), None))
]) ])
])), ])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS
skip = { skip = {
@ -20,24 +20,24 @@ tests = [
''', ''',
'asserts': [{'value': 'c', 'call': ('f', ['a', 'b', 'c'])}], 'asserts': [{'value': 'c', 'call': ('f', ['a', 'b', 'c'])}],
'ast': [ 'ast': [
(Token.FUNC, 'f', ['z'], [ (TokenTypes.FUNC, 'f', ['z'], [
(Token.FOR, (TokenTypes.FOR,
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'h'), None, None) (TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None)
]), None)]), ]), None)]),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'z'), None, None) (TokenTypes.MEMBER, (TokenTypes.ID, 'z'), None, None)
]), None)]), ]), None)]),
None, None,
(Token.BLOCK, [ (TokenTypes.BLOCK, [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['='][1], (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'a'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'h'), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None)]), None))
]) ])
])), ])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS
skip = { skip = {
@ -20,29 +20,29 @@ tests = [
''', ''',
'asserts': [{'value': 5, 'call': ('f', 5)}], 'asserts': [{'value': 5, 'call': ('f', 5)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', ['x'], [ (TokenTypes.FUNC, 'f', ['x'], [
(Token.FOR, (TokenTypes.FOR,
(Token.VAR, zip(['h'], [ (TokenTypes.VAR, zip(['h'], [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]), None)
])), ])),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'h'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None),
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.REL, _RELATIONS['<='][1]) (TokenTypes.REL, _RELATIONS['<='][1])
]), None)]), ]), None)]),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'h'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None),
(Token.PREFIX, _UNARY_OPERATORS['++'][1]) (TokenTypes.PREFIX, _UNARY_OPERATORS['++'][1])
]), None)]), ]), None)]),
(Token.BLOCK, [ (TokenTypes.BLOCK, [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['='][1], (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'a'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'h'), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'h'), None, None)]), None))
]) ])
])), ])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS
skip = { skip = {
@ -23,38 +23,38 @@ tests = [
''', ''',
'asserts': [{'value': 3, 'call': ('f',)}], 'asserts': [{'value': 3, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, zip(['add'], [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.VAR, zip(['add'], [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.FUNC, None, [], [ (TokenTypes.MEMBER, (TokenTypes.FUNC, None, [], [
(Token.VAR, zip( (TokenTypes.VAR, zip(
['counter'], ['counter'],
[(Token.ASSIGN, None, (Token.OPEXPR, [ [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 0), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)
]), None)] ]), None)]
)), )),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.FUNC, None, [], [ (TokenTypes.MEMBER, (TokenTypes.FUNC, None, [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['+='][1], (Token.OPEXPR, [ (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['+='][1], (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'counter'), None, None) (TokenTypes.MEMBER, (TokenTypes.ID, 'counter'), None, None)
]), (Token.ASSIGN, None, (Token.OPEXPR, [ ]), (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None)
]), None)) ]), None))
])) ]))
]), None, None) ]), None, None)
]), None)])) ]), None)]))
]), None, None), ]), None, None),
]), None)]), None, (Token.CALL, [], None)) ]), None)]), None, (TokenTypes.CALL, [], None))
]), None)])), ]), None)])),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'add'), None, (Token.CALL, [], None)) (TokenTypes.MEMBER, (TokenTypes.ID, 'add'), None, (TokenTypes.CALL, [], None))
]), None)]), ]), None)]),
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'add'), None, (Token.CALL, [], None)) (TokenTypes.MEMBER, (TokenTypes.ID, 'add'), None, (TokenTypes.CALL, [], None))
]), None)]), ]), None)]),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'add'), None, (Token.CALL, [], None)) (TokenTypes.MEMBER, (TokenTypes.ID, 'add'), None, (TokenTypes.CALL, [], None))
]), None)])) ]), None)]))
]) ])
] ]

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
skip = {'jsinterp': 'Field access is not supported'} skip = {'jsinterp': 'Field access is not supported'}
@ -10,16 +10,16 @@ tests = [
'asserts': [{'value': 3, 'call': ('f',)}], 'asserts': [{'value': 3, 'call': ('f',)}],
'globals': {'a': {'var': 3}}, 'globals': {'a': {'var': 3}},
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (TokenTypes.MEMBER,
(Token.ID, 'a'), (TokenTypes.ID, 'a'),
None, None,
(Token.FIELD, 'var', None)), (TokenTypes.FIELD, 'var', None)),
]), ]),
None) None)
])) ]))

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
skip = { skip = {
'jsinterp': 'Label statement is not supported', 'jsinterp': 'Label statement is not supported',

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS
tests = [ tests = [
@ -8,22 +8,22 @@ tests = [
'code': 'function f() { x = 2 ; return x; }', 'code': 'function f() { x = 2 ; return x; }',
'asserts': [{'value': 2, 'call': ('f',)}], 'asserts': [{'value': 2, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.EXPR, (TokenTypes.EXPR,
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]),
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 2), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)]),
None) None)
)] )]
), ),
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]),
None) None)
]) ])
) )

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _OPERATORS
skip = { skip = {
@ -21,32 +21,32 @@ tests = [
} }
''', ''',
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, (TokenTypes.VAR,
zip(['o'], zip(['o'],
[(Token.ASSIGN, None, (Token.OPEXPR, [ [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.OBJECT, [ (TokenTypes.MEMBER, (TokenTypes.OBJECT, [
('a', (Token.PROPVALUE, (Token.ASSIGN, None, (Token.OPEXPR, [ ('a', (TokenTypes.PROPVALUE, (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 7), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 7), None, None)
]), None))), ]), None))),
('b', (Token.PROPGET, [ ('b', (TokenTypes.PROPGET, [
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.RSV, 'this'), None, (Token.FIELD, 'a', None)), (TokenTypes.MEMBER, (TokenTypes.RSV, 'this'), None, (TokenTypes.FIELD, 'a', None)),
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None)])) ]), None)]))
])), ])),
('c', (Token.PROPSET, 'x', [ ('c', (TokenTypes.PROPSET, 'x', [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.RSV, 'this'), None, (Token.FIELD, 'a', None)) (TokenTypes.MEMBER, (TokenTypes.RSV, 'this'), None, (TokenTypes.FIELD, 'a', None))
]), ]),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.MEMBER, (Token.INT, 2), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None),
(Token.OP, _OPERATORS['/'][1]) (TokenTypes.OP, _OPERATORS['/'][1])
]), None)) ]), None))
]) ])
])) ]))
@ -55,8 +55,8 @@ tests = [
]), None)] ]), None)]
) )
), ),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'o'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'o'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
tests = [ tests = [
@ -8,13 +8,13 @@ tests = [
'code': 'function f() { return 1 << 5; }', 'code': 'function f() { return 1 << 5; }',
'asserts': [{'value': 32, 'call': ('f',)}], 'asserts': [{'value': 32, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.MEMBER, (Token.INT, 5), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 5), None, None),
(Token.OP, _OPERATORS['<<'][1]) (TokenTypes.OP, _OPERATORS['<<'][1])
]), None) ]), None)
])) ]))
]) ])
@ -23,13 +23,13 @@ tests = [
'code': 'function f() { return 19 & 21;}', 'code': 'function f() { return 19 & 21;}',
'asserts': [{'value': 17, 'call': ('f',)}], 'asserts': [{'value': 17, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 19), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 19), None, None),
(Token.MEMBER, (Token.INT, 21), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 21), None, None),
(Token.OP, _OPERATORS['&'][1]) (TokenTypes.OP, _OPERATORS['&'][1])
]), None) ]), None)
])) ]))
]) ])
@ -38,13 +38,13 @@ tests = [
'code': 'function f() { return 11 >> 2;}', 'code': 'function f() { return 11 >> 2;}',
'asserts': [{'value': 2, 'call': ('f',)}], 'asserts': [{'value': 2, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 11), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 11), None, None),
(Token.MEMBER, (Token.INT, 2), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None),
(Token.OP, _OPERATORS['>>'][1]) (TokenTypes.OP, _OPERATORS['>>'][1])
]), None) ]), None)
])) ]))
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
tests = [ tests = [
@ -8,20 +8,20 @@ tests = [
'code': 'function f() { return (1 + 2) * 3; }', 'code': 'function f() { return (1 + 2) * 3; }',
'asserts': [{'value': 9, 'call': ('f',)}], 'asserts': [{'value': 9, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.MEMBER, (Token.INT, 2), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None) ]), None)
]), None, None), ]), None, None),
(Token.MEMBER, (Token.INT, 3), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 3), None, None),
(Token.OP, _OPERATORS['*'][1]) (TokenTypes.OP, _OPERATORS['*'][1])
]), None) ]), None)
])) ]))
]) ])
@ -30,33 +30,33 @@ tests = [
'code': 'function f() { return (1) + (2) * ((( (( (((((3)))))) )) ));}', 'code': 'function f() { return (1) + (2) * ((( (( (((((3)))))) )) ));}',
'asserts': [{'value': 7, 'call': ('f',)}], 'asserts': [{'value': 7, 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.RETURN, (Token.EXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (TokenTypes.ASSIGN, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 1), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None)
]), None)]), None, None), ]), None)]), None, None),
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 2), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 2), None, None)
]), None)]), None, None), ]), None)]), None, None),
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.MEMBER, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (TokenTypes.MEMBER,
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (TokenTypes.MEMBER,
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 3), None, None) (TokenTypes.MEMBER, (TokenTypes.INT, 3), None, None)
]), None)]), None, None) ]), None)]), None, None)
]), None)]), None, None) ]), None)]), None, None)
]), None)]), None, None) ]), None)]), None, None)
@ -70,8 +70,8 @@ tests = [
]), None)]), None, None) ]), None)]), None, None)
]), None)]), None, None), ]), None)]), None, None),
(Token.OP, _OPERATORS['*'][1]), (TokenTypes.OP, _OPERATORS['*'][1]),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), None) ]), None)
])) ]))
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _OPERATORS
skip = {'interpret': 'Interpreting built-in fields are not yet implemented'} skip = {'interpret': 'Interpreting built-in fields are not yet implemented'}
@ -17,67 +17,67 @@ tests = [
''', ''',
'asserts': [{'value': [20, 20, 30, 40, 50], 'call': ('f',)}], 'asserts': [{'value': [20, 20, 30, 40, 50], 'call': ('f',)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', [], [ (TokenTypes.FUNC, 'f', [], [
(Token.VAR, (TokenTypes.VAR,
zip(['a'], zip(['a'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ARRAY, [ (TokenTypes.MEMBER, (TokenTypes.ARRAY, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 10), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 10), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 20), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 20), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 30), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 30), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 40), None, None)]), None), (TokenTypes.MEMBER, (TokenTypes.INT, 40), None, None)]), None),
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 50), None, None)]), None) (TokenTypes.MEMBER, (TokenTypes.INT, 50), None, None)]), None)
]), None, None), ]), None, None),
]), ]),
None) None)
]) ])
), ),
(Token.VAR, (TokenTypes.VAR,
zip(['b'], zip(['b'],
[(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 6), None, None)]), None)] [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 6), None, None)]), None)]
) )
), ),
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'),
None, None,
(Token.ELEM, (TokenTypes.ELEM,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]),
None) None)
]), ]),
None)) None))
]), ]),
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'a'), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'),
None, None,
(Token.ELEM, (Token.EXPR, [ (TokenTypes.ELEM, (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'b'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'b'), None, None),
(Token.MEMBER, (Token.ID, 'a'), None, (Token.FIELD, 'length', None)), (TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, (TokenTypes.FIELD, 'length', None)),
(Token.OP, _OPERATORS['%'][1]) (TokenTypes.OP, _OPERATORS['%'][1])
]), None)]), ]), None)]),
None)) None))
]), ]),
None) None)
) )
]), ]),
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'a'), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'a'), None, None)]), None)
]) ])
) )
]) ])

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _OPERATORS from youtube_dl.jsinterp2.tstream import _OPERATORS
tests = [ tests = [
@ -8,24 +8,24 @@ tests = [
'code': 'function $_xY1 ($_axY1) { var $_axY2 = $_axY1 + 1; return $_axY2; }', 'code': 'function $_xY1 ($_axY1) { var $_axY2 = $_axY1 + 1; return $_axY2; }',
'asserts': [{'value': 21, 'call': ('$_xY1', 20)}], 'asserts': [{'value': 21, 'call': ('$_xY1', 20)}],
'ast': [ 'ast': [
(Token.FUNC, '$_xY1', ['$_axY1'], [ (TokenTypes.FUNC, '$_xY1', ['$_axY1'], [
(Token.VAR, (TokenTypes.VAR,
zip(['$_axY2'], zip(['$_axY2'],
[(Token.ASSIGN, [(TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [ (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, '$_axY1'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, '$_axY1'), None, None),
(Token.MEMBER, (Token.INT, 1), None, None), (TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None),
(Token.OP, _OPERATORS['+'][1]) (TokenTypes.OP, _OPERATORS['+'][1])
]), ]),
None) None)
]) ])
), ),
(Token.RETURN, (TokenTypes.RETURN,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
None, None,
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, '$_axY2'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, '$_axY2'), None, None)]),
None)] None)]
) )
) )

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS
skip = { skip = {
@ -31,47 +31,47 @@ tests = [
{'value': 6, 'call': ('a', 6)}, {'value': 6, 'call': ('a', 6)},
{'value': 8, 'call': ('a', 7)}], {'value': 8, 'call': ('a', 7)}],
'ast': [ 'ast': [
(Token.FUNC, 'a', ['x'], [ (TokenTypes.FUNC, 'a', ['x'], [
(Token.SWITCH, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.SWITCH, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None) (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)
]), None)]), ]), None)]),
[ [
((Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ ((TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 6), None, None)]), None)]), (TokenTypes.MEMBER, (TokenTypes.INT, 6), None, None)]), None)]),
[ [
(Token.BREAK, None) (TokenTypes.BREAK, None)
]), ]),
((Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ ((TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 5), None, None)]), None)]), (TokenTypes.MEMBER, (TokenTypes.INT, 5), None, None)]), None)]),
[ [
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.POSTFIX, _UNARY_OPERATORS['++'][1]) (TokenTypes.POSTFIX, _UNARY_OPERATORS['++'][1])
]), None)]) ]), None)])
]), ]),
((Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ ((TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.INT, 8), None, None)]), None)]), (TokenTypes.MEMBER, (TokenTypes.INT, 8), None, None)]), None)]),
[ [
(Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.POSTFIX, _UNARY_OPERATORS['--'][1]) (TokenTypes.POSTFIX, _UNARY_OPERATORS['--'][1])
]), None)]), ]), None)]),
(Token.BREAK, None) (TokenTypes.BREAK, None)
]), ]),
(None, (None,
[ [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, (TokenTypes.ASSIGN,
_ASSIGN_OPERATORS['='][1], _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'x'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 0), None, None)]), None) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 0), None, None)]), None)
) )
]) ])
]) ])
] ]
), ),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'x'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
skip = { skip = {
'jsinterp': 'Try statement is not supported', 'jsinterp': 'Try statement is not supported',

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS from youtube_dl.jsinterp2.tstream import _ASSIGN_OPERATORS, _UNARY_OPERATORS, _RELATIONS
skip = { skip = {
@ -21,30 +21,30 @@ tests = [
''', ''',
'asserts': [{'value': 5, 'call': ('f', 5)}], 'asserts': [{'value': 5, 'call': ('f', 5)}],
'ast': [ 'ast': [
(Token.FUNC, 'f', ['x'], [ (TokenTypes.FUNC, 'f', ['x'], [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, _ASSIGN_OPERATORS['='][1], (TokenTypes.ASSIGN, _ASSIGN_OPERATORS['='][1],
(Token.OPEXPR, [(Token.MEMBER, (Token.ID, 'i'), None, None)]), (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None)]),
(Token.ASSIGN, None, (Token.OPEXPR, [(Token.MEMBER, (Token.INT, 1), None, None)]), None)) (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [(TokenTypes.MEMBER, (TokenTypes.INT, 1), None, None)]), None))
]), ]),
(Token.WHILE, (TokenTypes.WHILE,
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None),
(Token.MEMBER, (Token.ID, 'x'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'x'), None, None),
(Token.REL, _RELATIONS['<'][1]) (TokenTypes.REL, _RELATIONS['<'][1])
]), None) ]), None)
]), ]),
(Token.BLOCK, [ (TokenTypes.BLOCK, [
(Token.EXPR, [ (TokenTypes.EXPR, [
(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None), (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None),
(Token.POSTFIX, _UNARY_OPERATORS['++'][1]) (TokenTypes.POSTFIX, _UNARY_OPERATORS['++'][1])
]), None) ]), None)
]) ])
])), ])),
(Token.RETURN, (Token.EXPR, [(Token.ASSIGN, None, (Token.OPEXPR, [ (TokenTypes.RETURN, (TokenTypes.EXPR, [(TokenTypes.ASSIGN, None, (TokenTypes.OPEXPR, [
(Token.MEMBER, (Token.ID, 'i'), None, None)]), None)])) (TokenTypes.MEMBER, (TokenTypes.ID, 'i'), None, None)]), None)]))
]) ])
] ]
} }

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsgrammar import Token from youtube_dl.jsinterp2.jsgrammar import TokenTypes
skip = { skip = {
'jsinterp': 'With statement is not supported', 'jsinterp': 'With statement is not supported',

View File

@ -1,6 +1,6 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from youtube_dl.jsinterp2.jsbuilt_ins.internals import to_uint32, to_integer from .internals import to_uint32, to_integer
from .base import native_number, undefined from .base import native_number, undefined
from .jsobject import JSObject, JSObjectPrototype from .jsobject import JSObject, JSObjectPrototype
from .jsnumber import JSNumberPrototype from .jsnumber import JSNumberPrototype

View File

@ -20,7 +20,7 @@ _token_names = ('COPEN', 'CCLOSE', 'POPEN', 'PCLOSE', 'SOPEN', 'SCLOSE',
'PROPGET', 'PROPSET', 'PROPVALUE', 'PROPGET', 'PROPSET', 'PROPVALUE',
'RSV') 'RSV')
Token = namedtuple('Token', _token_names)._make(_token_names) TokenTypes = namedtuple('Token', _token_names)._make(_token_names)
__DECIMAL_RE = r'(?:[1-9][0-9]*)|0' __DECIMAL_RE = r'(?:[1-9][0-9]*)|0'
__OCTAL_RE = r'0[0-7]+' __OCTAL_RE = r'0[0-7]+'
@ -61,20 +61,21 @@ _NULL_RE = r'null'
_REGEX_FLAGS_RE = r'(?![gimy]*(?P<reflag>[gimy])[gimy]*(?P=reflag))(?P<%s>[gimy]{0,4}\b)' % 'REFLAGS' _REGEX_FLAGS_RE = r'(?![gimy]*(?P<reflag>[gimy])[gimy]*(?P=reflag))(?P<%s>[gimy]{0,4}\b)' % 'REFLAGS'
_REGEX_RE = r'/(?!\*)(?P<%s>(?:[^/\n]|(?:\\/))*)/(?:(?:%s)|(?:\s|$))' % ('REBODY', _REGEX_FLAGS_RE) _REGEX_RE = r'/(?!\*)(?P<%s>(?:[^/\n]|(?:\\/))*)/(?:(?:%s)|(?:\s|$))' % ('REBODY', _REGEX_FLAGS_RE)
token_keys = Token.NULL, Token.BOOL, Token.ID, Token.STR, Token.INT, Token.FLOAT, Token.REGEX token_keys = TokenTypes.NULL, TokenTypes.BOOL, TokenTypes.ID, TokenTypes.STR, TokenTypes.INT, TokenTypes.FLOAT, TokenTypes.REGEX
_TOKENS = zip(token_keys, (_NULL_RE, _BOOL_RE, _NAME_RE, _STRING_RE, _INTEGER_RE, _FLOAT_RE, _REGEX_RE)) _TOKENS = zip(token_keys, (_NULL_RE, _BOOL_RE, _NAME_RE, _STRING_RE, _INTEGER_RE, _FLOAT_RE, _REGEX_RE))
COMMENT_RE = r'(?P<%s>/\*(?:(?!\*/)(?:\n|.))*\*/)' % Token.COMMENT COMMENT_RE = r'(?P<%s>/\*(?:(?!\*/)(?:\n|.))*\*/)' % TokenTypes.COMMENT
TOKENS_RE = r'|'.join('(?P<%(id)s>%(value)s)' % {'id': name, 'value': value} TOKENS_RE = r'|'.join('(?P<%(id)s>%(value)s)' % {'id': name, 'value': value}
for name, value in _TOKENS) for name, value in _TOKENS)
LOGICAL_OPERATORS_RE = r'(?P<%s>%s)' % (Token.LOP, r'|'.join(re.escape(value) for value in _logical_operator)) LOGICAL_OPERATORS_RE = r'(?P<%s>%s)' % (TokenTypes.LOP, r'|'.join(re.escape(value) for value in _logical_operator))
UNARY_OPERATORS_RE = r'(?P<%s>%s)' % (Token.UOP, r'|'.join(re.escape(value) for value in _unary_operator)) UNARY_OPERATORS_RE = r'(?P<%s>%s)' % (TokenTypes.UOP, r'|'.join(re.escape(value) for value in _unary_operator))
ASSIGN_OPERATORS_RE = r'(?P<%s>%s)' % (Token.AOP, ASSIGN_OPERATORS_RE = r'(?P<%s>%s)' % (TokenTypes.AOP,
r'|'.join(re.escape(value) if value != '=' else re.escape(value) + r'(?!\=)' r'|'.join(re.escape(value) if value != '=' else re.escape(value) + r'(?!\=)'
for value in _assign_operator)) for value in _assign_operator))
OPERATORS_RE = r'(?P<%s>%s)' % (Token.OP, r'|'.join(re.escape(value) for value in _operator)) OPERATORS_RE = r'(?P<%s>%s)' % (TokenTypes.OP, r'|'.join(re.escape(value) for value in _operator))
RELATIONS_RE = r'(?P<%s>%s)' % (Token.REL, r'|'.join(re.escape(value) for value in _relation)) RELATIONS_RE = r'(?P<%s>%s)' % (TokenTypes.REL, r'|'.join(re.escape(value) for value in _relation))
PUNCTUATIONS_RE = r'(?P<%s>%s)' % (Token.PUNCT, r'|'.join(re.escape(value) for value in _punctuations)) PUNCTUATIONS_RE = r'(?P<%s>%s)' % (TokenTypes.PUNCT, r'|'.join(re.escape(value) for value in _punctuations))
LINETERMINATORSEQ_RE = r'\n|\r(?!\n)|\u2028|\u2029'

View File

@ -5,7 +5,7 @@ import re
from ..compat import compat_str from ..compat import compat_str
from ..utils import ExtractorError from ..utils import ExtractorError
from .jsparser import Parser from .jsparser import Parser
from .jsgrammar import Token, token_keys from .jsgrammar import TokenTypes, token_keys
from .jsbuilt_ins import global_obj from .jsbuilt_ins import global_obj
from .jsbuilt_ins.base import isprimitive from .jsbuilt_ins.base import isprimitive
from .jsbuilt_ins.internals import to_string from .jsbuilt_ins.internals import to_string
@ -101,7 +101,7 @@ class JSInterpreter(object):
name = stmt[0] name = stmt[0]
ref = None ref = None
if name == Token.FUNC: if name == TokenTypes.FUNC:
name, args, body = stmt[1:] name, args, body = stmt[1:]
if name is not None: if name is not None:
if self._context_stack: if self._context_stack:
@ -110,23 +110,23 @@ class JSInterpreter(object):
self.global_vars[name] = Reference(self.build_function(args, body), (self.this, name)) self.global_vars[name] = Reference(self.build_function(args, body), (self.this, name))
else: else:
raise ExtractorError('Function expression is not yet implemented') raise ExtractorError('Function expression is not yet implemented')
elif name is Token.BLOCK: elif name is TokenTypes.BLOCK:
block = stmt[1] block = stmt[1]
for stmt in block: for stmt in block:
s = self.interpret_statement(stmt) s = self.interpret_statement(stmt)
if s is not None: if s is not None:
ref = s.getvalue() ref = s.getvalue()
elif name is Token.VAR: elif name is TokenTypes.VAR:
for name, value in stmt[1]: for name, value in stmt[1]:
value = (self.interpret_expression(value).getvalue() if value is not None else value = (self.interpret_expression(value).getvalue() if value is not None else
global_obj.get_prop('undefined')) global_obj.get_prop('undefined'))
self.this[name] = Reference(value, (self.this, name)) self.this[name] = Reference(value, (self.this, name))
elif name is Token.EXPR: elif name is TokenTypes.EXPR:
for expr in stmt[1]: for expr in stmt[1]:
ref = self.interpret_expression(expr) ref = self.interpret_expression(expr)
# if # if
# continue, break # continue, break
elif name is Token.RETURN: elif name is TokenTypes.RETURN:
ref = self.interpret_statement(stmt[1]) ref = self.interpret_statement(stmt[1])
self._context.ended = True self._context.ended = True
# with # with
@ -144,7 +144,7 @@ class JSInterpreter(object):
return return
name = expr[0] name = expr[0]
if name is Token.ASSIGN: if name is TokenTypes.ASSIGN:
op, left, right = expr[1:] op, left, right = expr[1:]
if op is None: if op is None:
ref = self.interpret_expression(left) ref = self.interpret_expression(left)
@ -154,11 +154,11 @@ class JSInterpreter(object):
except ExtractorError: except ExtractorError:
lname = left[0] lname = left[0]
key = None key = None
if lname is Token.OPEXPR and len(left[1]) == 1: if lname is TokenTypes.OPEXPR and len(left[1]) == 1:
lname = left[1][0][0] lname = left[1][0][0]
if lname is Token.MEMBER: if lname is TokenTypes.MEMBER:
lid, args, tail = left[1][0][1:] lid, args, tail = left[1][0][1:]
if lid[0] is Token.ID and args is None and tail is None: if lid[0] is TokenTypes.ID and args is None and tail is None:
key = lid[1] key = lid[1]
if key is not None: if key is not None:
u = Reference(global_obj.get_prop('undefined'), (self.this, key)) u = Reference(global_obj.get_prop('undefined'), (self.this, key))
@ -171,10 +171,10 @@ class JSInterpreter(object):
# XXX check specs what to return # XXX check specs what to return
ref = leftref ref = leftref
elif name is Token.EXPR: elif name is TokenTypes.EXPR:
ref = self.interpret_statement(expr) ref = self.interpret_statement(expr)
elif name is Token.OPEXPR: elif name is TokenTypes.OPEXPR:
stack = [] stack = []
postfix = [] postfix = []
rpn = expr[1][:] rpn = expr[1][:]
@ -182,18 +182,18 @@ class JSInterpreter(object):
while rpn: while rpn:
token = rpn.pop(0) token = rpn.pop(0)
# XXX relation 'in' 'instanceof' # XXX relation 'in' 'instanceof'
if token[0] in (Token.OP, Token.AOP, Token.LOP, Token.REL): if token[0] in (TokenTypes.OP, TokenTypes.AOP, TokenTypes.LOP, TokenTypes.REL):
right = stack.pop() right = stack.pop()
left = stack.pop() left = stack.pop()
stack.append(Reference(token[1](left.getvalue(), right.getvalue()))) stack.append(Reference(token[1](left.getvalue(), right.getvalue())))
# XXX add unary operator 'delete', 'void', 'instanceof' # XXX add unary operator 'delete', 'void', 'instanceof'
elif token[0] is Token.UOP: elif token[0] is TokenTypes.UOP:
right = stack.pop() right = stack.pop()
stack.append(Reference(token[1](right.getvalue()))) stack.append(Reference(token[1](right.getvalue())))
elif token[0] is Token.PREFIX: elif token[0] is TokenTypes.PREFIX:
right = stack.pop() right = stack.pop()
stack.append(Reference(right.putvalue(token[1](right.getvalue())))) stack.append(Reference(right.putvalue(token[1](right.getvalue()))))
elif token[0] is Token.POSTFIX: elif token[0] is TokenTypes.POSTFIX:
postfix.append((stack[-1], token[1])) postfix.append((stack[-1], token[1]))
else: else:
stack.append(self.interpret_expression(token)) stack.append(self.interpret_expression(token))
@ -205,7 +205,7 @@ class JSInterpreter(object):
else: else:
raise ExtractorError('Expression has too many values') raise ExtractorError('Expression has too many values')
elif name is Token.MEMBER: elif name is TokenTypes.MEMBER:
# TODO interpret member # TODO interpret member
target, args, tail = expr[1:] target, args, tail = expr[1:]
target = self.interpret_expression(target) target = self.interpret_expression(target)
@ -215,13 +215,13 @@ class JSInterpreter(object):
source = None source = None
while tail is not None: while tail is not None:
tail_name, tail_value, tail = tail tail_name, tail_value, tail = tail
if tail_name is Token.FIELD: if tail_name is TokenTypes.FIELD:
source = to_js(target.getvalue()) source = to_js(target.getvalue())
target = source.get_prop(tail_value) target = source.get_prop(tail_value)
elif tail_name is Token.ELEM: elif tail_name is TokenTypes.ELEM:
prop = self.interpret_expression(tail_value).getvalue() prop = self.interpret_expression(tail_value).getvalue()
target = to_js(target.getvalue()).get_prop(to_string(to_js(prop))) target = to_js(target.getvalue()).get_prop(to_string(to_js(prop)))
elif tail_name is Token.CALL: elif tail_name is TokenTypes.CALL:
args = (self.interpret_expression(arg).getvalue() for arg in tail_value) args = (self.interpret_expression(arg).getvalue() for arg in tail_value)
if isprimitive(target): if isprimitive(target):
if source is None: if source is None:
@ -239,7 +239,7 @@ class JSInterpreter(object):
target = Reference(target.getvalue()) target = Reference(target.getvalue())
ref = target ref = target
elif name is Token.ID: elif name is TokenTypes.ID:
# XXX error handling (unknown id) # XXX error handling (unknown id)
id = expr[1] id = expr[1]
try: try:
@ -255,7 +255,7 @@ class JSInterpreter(object):
elif name in token_keys: elif name in token_keys:
ref = Reference(expr[1]) ref = Reference(expr[1])
elif name is Token.ARRAY: elif name is TokenTypes.ARRAY:
array = [] array = []
for key, elem in enumerate(expr[1]): for key, elem in enumerate(expr[1]):
value = self.interpret_expression(elem).getvalue() value = self.interpret_expression(elem).getvalue()

File diff suppressed because it is too large Load Diff

View File

@ -5,82 +5,83 @@ import operator
from ..utils import ExtractorError from ..utils import ExtractorError
from .jsgrammar import ( from .jsgrammar import (
ASSIGN_OPERATORS_RE,
COMMENT_RE, COMMENT_RE,
LINETERMINATORSEQ_RE,
LOGICAL_OPERATORS_RE,
OPERATORS_RE,
TOKENS_RE, TOKENS_RE,
PUNCTUATIONS_RE, PUNCTUATIONS_RE,
LOGICAL_OPERATORS_RE,
UNARY_OPERATORS_RE,
RELATIONS_RE, RELATIONS_RE,
ASSIGN_OPERATORS_RE, UNARY_OPERATORS_RE,
OPERATORS_RE, TokenTypes
Token
) )
_PUNCTUATIONS = { _PUNCTUATIONS = {
'{': Token.COPEN, '{': TokenTypes.COPEN,
'}': Token.CCLOSE, '}': TokenTypes.CCLOSE,
'(': Token.POPEN, '(': TokenTypes.POPEN,
')': Token.PCLOSE, ')': TokenTypes.PCLOSE,
'[': Token.SOPEN, '[': TokenTypes.SOPEN,
']': Token.SCLOSE, ']': TokenTypes.SCLOSE,
'.': Token.DOT, '.': TokenTypes.DOT,
';': Token.END, ';': TokenTypes.END,
',': Token.COMMA, ',': TokenTypes.COMMA,
'?': Token.HOOK, '?': TokenTypes.HOOK,
':': Token.COLON ':': TokenTypes.COLON
} }
_LOGICAL_OPERATORS = { _LOGICAL_OPERATORS = {
'&&': (Token.AND, lambda cur, right: cur and right), '&&': (TokenTypes.AND, lambda cur, right: cur and right),
'||': (Token.OR, lambda cur, right: cur or right) '||': (TokenTypes.OR, lambda cur, right: cur or right)
} }
_UNARY_OPERATORS = { _UNARY_OPERATORS = {
'+': (Token.PLUS, lambda cur: cur), '+': (TokenTypes.PLUS, lambda cur: cur),
'-': (Token.NEG, lambda cur: cur * -1), '-': (TokenTypes.NEG, lambda cur: cur * -1),
'++': (Token.INC, lambda cur: cur + 1), '++': (TokenTypes.INC, lambda cur: cur + 1),
'--': (Token.DEC, lambda cur: cur - 1), '--': (TokenTypes.DEC, lambda cur: cur - 1),
'!': (Token.NOT, operator.not_), '!': (TokenTypes.NOT, operator.not_),
'~': (Token.BNOT, operator.inv), '~': (TokenTypes.BNOT, operator.inv),
# XXX define these operators # XXX define these operators
'delete': (Token.DEL, None), 'delete': (TokenTypes.DEL, None),
'void': (Token.VOID, None), 'void': (TokenTypes.VOID, None),
'typeof': (Token.TYPE, lambda cur: type(cur)) 'typeof': (TokenTypes.TYPE, lambda cur: type(cur))
} }
_RELATIONS = { _RELATIONS = {
'<': (Token.LT, operator.lt), '<': (TokenTypes.LT, operator.lt),
'>': (Token.GT, operator.gt), '>': (TokenTypes.GT, operator.gt),
'<=': (Token.LE, operator.le), '<=': (TokenTypes.LE, operator.le),
'>=': (Token.GE, operator.ge), '>=': (TokenTypes.GE, operator.ge),
# XXX check python and JavaScript equality difference # XXX check python and JavaScript equality difference
'==': (Token.EQ, operator.eq), '==': (TokenTypes.EQ, operator.eq),
'!=': (Token.NE, operator.ne), '!=': (TokenTypes.NE, operator.ne),
'===': (Token.SEQ, lambda cur, right: cur == right and type(cur) == type(right)), '===': (TokenTypes.SEQ, lambda cur, right: cur == right and type(cur) == type(right)),
'!==': (Token.SNE, lambda cur, right: not cur == right or not type(cur) == type(right)), '!==': (TokenTypes.SNE, lambda cur, right: not cur == right or not type(cur) == type(right)),
'in': (Token.IN, operator.contains), 'in': (TokenTypes.IN, operator.contains),
'instanceof': (Token.INSTANCEOF, lambda cur, right: isinstance(cur, right)) 'instanceof': (TokenTypes.INSTANCEOF, lambda cur, right: isinstance(cur, right))
} }
_OPERATORS = { _OPERATORS = {
'|': (Token.BOR, operator.or_), '|': (TokenTypes.BOR, operator.or_),
'^': (Token.BXOR, operator.xor), '^': (TokenTypes.BXOR, operator.xor),
'&': (Token.BAND, operator.and_), '&': (TokenTypes.BAND, operator.and_),
# NOTE convert to int before shift float # NOTE convert to int before shift float
'>>': (Token.RSHIFT, operator.rshift), '>>': (TokenTypes.RSHIFT, operator.rshift),
'<<': (Token.LSHIFT, operator.lshift), '<<': (TokenTypes.LSHIFT, operator.lshift),
'>>>': (Token.URSHIFT, lambda cur, right: cur >> right if cur >= 0 else (cur + 0x100000000) >> right), '>>>': (TokenTypes.URSHIFT, lambda cur, right: cur >> right if cur >= 0 else (cur + 0x100000000) >> right),
'-': (Token.SUB, operator.sub), '-': (TokenTypes.SUB, operator.sub),
'+': (Token.ADD, operator.add), '+': (TokenTypes.ADD, operator.add),
'%': (Token.MOD, operator.mod), '%': (TokenTypes.MOD, operator.mod),
'/': (Token.DIV, operator.truediv), '/': (TokenTypes.DIV, operator.truediv),
'*': (Token.MUL, operator.mul) '*': (TokenTypes.MUL, operator.mul)
} }
_ASSIGN_OPERATORS = dict((op + '=', ('set_%s' % token[0], token[1])) for op, token in _OPERATORS.items()) _ASSIGN_OPERATORS = dict((op + '=', ('set_%s' % token[0], token[1])) for op, token in _OPERATORS.items())
_ASSIGN_OPERATORS['='] = ('set', lambda cur, right: right) _ASSIGN_OPERATORS['='] = ('set', lambda cur, right: right)
_operator_lookup = { _operator_lookup = {
Token.OP: _OPERATORS, TokenTypes.OP: _OPERATORS,
Token.AOP: _ASSIGN_OPERATORS, TokenTypes.AOP: _ASSIGN_OPERATORS,
Token.UOP: _UNARY_OPERATORS, TokenTypes.UOP: _UNARY_OPERATORS,
Token.LOP: _LOGICAL_OPERATORS, TokenTypes.LOP: _LOGICAL_OPERATORS,
Token.REL: _RELATIONS TokenTypes.REL: _RELATIONS
} }
# only to check ids # only to check ids
_reserved_words = ('break', 'case', 'catch', 'continue', 'debugger', 'default', 'delete', 'do', 'else', 'finally', _reserved_words = ('break', 'case', 'catch', 'continue', 'debugger', 'default', 'delete', 'do', 'else', 'finally',
@ -97,9 +98,21 @@ _input_element = re.compile(r'\s*(?:%(comment)s|%(token)s|%(lop)s|%(uop)s|%(aop)
'punct': PUNCTUATIONS_RE 'punct': PUNCTUATIONS_RE
}) })
_line_terminator = re.compile(LINETERMINATORSEQ_RE)
def convert_to_unary(token_value): def convert_to_unary(token_value):
return {Token.ADD: _UNARY_OPERATORS['+'], Token.SUB: _UNARY_OPERATORS['-']}[token_value[0]] return {TokenTypes.ADD: _UNARY_OPERATORS['+'], TokenTypes.SUB: _UNARY_OPERATORS['-']}[token_value[0]]
class Token(object):
def __init__(self, token_type, token_value, pos, line, at):
super(Token, self).__init__()
self.id = token_type
self.value = token_value
self.pos = pos
self.line = line
self.at = at
class TokenStream(object): class TokenStream(object):
@ -110,6 +123,7 @@ class TokenStream(object):
self.peeked = [] self.peeked = []
self._ts = self._next_token(start) self._ts = self._next_token(start)
self._last = None self._last = None
self._line = 1 + len(_line_terminator.findall(self.code[:start]))
def _next_token(self, pos=0): def _next_token(self, pos=0):
while not self.ended: while not self.ended:
@ -118,35 +132,43 @@ class TokenStream(object):
token_id = feed_m.lastgroup token_id = feed_m.lastgroup
token_value = feed_m.group(token_id) token_value = feed_m.group(token_id)
pos = feed_m.start(token_id) pos = feed_m.start(token_id)
token_id = Token[Token.index(token_id)] token_id = TokenTypes[TokenTypes.index(token_id)]
# TODO use line report insteadof position
lt_count, lt_match = 0, None
for lt_count, lt_match in enumerate(_line_terminator.finditer(token_value)): pass
lt_last = pos if lt_match is None else pos + lt_match.start()
at = pos - lt_last
self._line += lt_count
self.ended = feed_m.end() >= len(self.code) # because how yield works self.ended = feed_m.end() >= len(self.code) # because how yield works
if token_id is Token.COMMENT: if token_id is TokenTypes.COMMENT:
pass pass
# TODO date # TODO date
elif token_id is Token.NULL: elif token_id is TokenTypes.NULL:
yield (token_id, None, pos) yield Token(token_id, None, pos, self._line, at)
elif token_id is Token.BOOL: elif token_id is TokenTypes.BOOL:
yield (token_id, {'true': True, 'false': False}[token_value], pos) yield Token(token_id, {'true': True, 'false': False}[token_value], pos, self._line, at)
elif token_id is Token.STR: elif token_id is TokenTypes.STR:
yield (token_id, token_value[1:-1], pos) yield Token(token_id, token_value[1:-1], pos, self._line, at)
elif token_id is Token.INT: elif token_id is TokenTypes.INT:
root = ((16 if len(token_value) > 2 and token_value[1] in 'xX' else 8) root = ((16 if len(token_value) > 2 and token_value[1] in 'xX' else 8)
if token_value.startswith('0') else 10) if token_value.startswith('0') else 10)
yield (token_id, int(token_value, root), pos) yield Token(token_id, int(token_value, root), pos, self._line, at)
elif token_id is Token.FLOAT: elif token_id is TokenTypes.FLOAT:
yield (token_id, float(token_value), pos) yield Token(token_id, float(token_value), pos, self._line, at)
elif token_id is Token.REGEX: elif token_id is TokenTypes.REGEX:
# TODO error handling # TODO error handling
regex = re.compile(feed_m.group('rebody')) regex = re.compile(feed_m.group('rebody'))
yield (token_id, (regex, feed_m.group('reflags')), pos) yield Token(token_id, (regex, feed_m.group('reflags')), pos, self._line, at)
elif token_id is Token.ID: elif token_id is TokenTypes.ID:
yield (token_id, token_value, pos) yield Token(token_id, token_value, pos, self._line, at)
elif token_id in _operator_lookup: elif token_id in _operator_lookup:
yield (token_id if token_value != 'in' else Token.IN, yield Token(token_id if token_value != 'in' else TokenTypes.IN,
_operator_lookup[token_id][token_value], _operator_lookup[token_id][token_value],
pos) pos, self._line, at)
elif token_id is Token.PUNCT: elif token_id is TokenTypes.PUNCT:
yield (_PUNCTUATIONS[token_value], token_value, pos) yield Token(_PUNCTUATIONS[token_value], token_value, pos, self._line, at)
else: else:
raise ExtractorError('Unexpected token at %d' % pos) raise ExtractorError('Unexpected token at %d' % pos)
pos = feed_m.end() pos = feed_m.end()
@ -157,17 +179,24 @@ class TokenStream(object):
def chk_id(self, last=False): def chk_id(self, last=False):
if last: if last:
name, value, pos = self._last token = self._last
else: else:
name, value, pos = self.peek() token = self.peek()
if name is not Token.ID or value in _reserved_words: if token.id is not TokenTypes.ID or token.value in _reserved_words:
raise ExtractorError('Invalid identifier at %d' % pos) raise ExtractorError('Invalid identifier at %d' % token.pos)
def peek(self, count=1): def peek(self, count=1):
for _ in range(count - len(self.peeked)): for _ in range(count - len(self.peeked)):
token = next(self._ts, None) token = next(self._ts, None)
if token is None: if token is None:
self.peeked.append((Token.END, ';', len(self.code))) pos = len(self.code)
lt_count, lt_match = 0, None
for lt_count, lt_match in enumerate(_line_terminator.finditer(self.code)): pass
lt_last = pos if lt_match is None else pos + lt_match.start()
at = pos - lt_last
self.peeked.append(Token(TokenTypes.END, ';', pos, self._line, at))
else: else:
self.peeked.append(token) self.peeked.append(token)
return self.peeked[count - 1] return self.peeked[count - 1]