@@ -13,44 +13,38 @@ const bracketed = [
13
13
DictionaryExpression , DictionaryComprehensionExpression , SetExpression , SetComprehensionExpression , ArgList , ParamList
14
14
]
15
15
16
- let cachedIndent = 0 , cachedInput = null , cachedPos = 0
17
- function getIndent ( input , pos ) {
18
- if ( pos == cachedPos && input == cachedInput ) return cachedIndent
19
- cachedInput = input ; cachedPos = pos
20
- return cachedIndent = getIndentInner ( input , pos )
21
- }
22
-
23
- function getIndentInner ( input , pos ) {
24
- for ( let indent = 0 ; ; pos ++ ) {
25
- let ch = input . get ( pos )
26
- if ( ch == space ) indent ++
27
- else if ( ch == tab ) indent += 8 - ( indent % 8 )
28
- else if ( ch == newline || ch == carriageReturn || ch == hash ) return - 1
29
- else return indent
30
- }
31
- }
32
-
33
- export const newlines = new ExternalTokenizer ( ( input , token , stack ) => {
34
- let next = input . get ( token . start )
35
- if ( next < 0 ) {
36
- token . accept ( eof , token . start )
37
- } else if ( next != newline && next != carriageReturn ) {
16
+ export const newlines = new ExternalTokenizer ( ( input , stack ) => {
17
+ if ( input . next < 0 ) {
18
+ input . acceptToken ( eof )
19
+ } else if ( input . next != newline && input . next != carriageReturn ) {
38
20
} else if ( stack . startOf ( bracketed ) != null ) {
39
- token . accept ( newlineBracketed , token . start + 1 )
40
- } else if ( getIndent ( input , token . start + 1 ) < 0 ) {
41
- token . accept ( newlineEmpty , token . start + 1 )
21
+ input . acceptToken ( newlineBracketed , 1 )
42
22
} else {
43
- token . accept ( newlineToken , token . start + 1 )
23
+ input . advance ( )
24
+ let spaces = 0
25
+ while ( input . next == space || input . next == tab ) { input . advance ( ) ; spaces ++ }
26
+ let empty = input . next == newline || input . next == carriageReturn || input . next == hash
27
+ input . acceptToken ( empty ? newlineEmpty : newlineToken , - spaces )
44
28
}
45
29
} , { contextual : true , fallback : true } )
46
30
47
- export const indentation = new ExternalTokenizer ( ( input , token , stack ) => {
48
- let prev = input . get ( token . start - 1 ) , depth
49
- if ( ( prev == newline || prev == carriageReturn ) &&
50
- ( depth = getIndent ( input , token . start ) ) >= 0 &&
51
- depth != stack . context . depth &&
52
- stack . startOf ( bracketed ) == null )
53
- token . accept ( depth < stack . context . depth ? dedent : indent , token . start )
31
+ export const indentation = new ExternalTokenizer ( ( input , stack ) => {
32
+ let prev = input . peek ( - 1 )
33
+ if ( ( prev == newline || prev == carriageReturn ) && stack . startOf ( bracketed ) == null ) {
34
+ let depth = 0 , chars = 0
35
+ for ( ; ; ) {
36
+ if ( input . next == space ) depth ++
37
+ else if ( input . next == tab ) depth += 8 - ( depth % 8 )
38
+ else break
39
+ input . advance ( )
40
+ chars ++
41
+ }
42
+ if ( depth != stack . context . depth &&
43
+ input . next != newline && input . next != carriageReturn && input . next != hash ) {
44
+ if ( depth < stack . context . depth ) input . acceptToken ( dedent , - chars )
45
+ else input . acceptToken ( indent )
46
+ }
47
+ }
54
48
} )
55
49
56
50
function IndentLevel ( parent , depth ) {
@@ -63,24 +57,23 @@ const topIndent = new IndentLevel(null, 0)
63
57
64
58
export const trackIndent = new ContextTracker ( {
65
59
start : topIndent ,
66
- shift ( context , term , input , stack ) {
67
- return term == indent ? new IndentLevel ( context , getIndent ( input , stack . pos ) ) :
68
- term == dedent ? context . parent : context
60
+ shift ( context , term , stack , input ) {
61
+ return term == indent ? new IndentLevel ( context , stack . pos - input . pos ) : term == dedent ? context . parent : context
69
62
} ,
70
63
hash ( context ) { return context . hash }
71
64
} )
72
65
73
- export const legacyPrint = new ExternalTokenizer ( ( input , token ) => {
74
- let pos = token . start
75
- for ( let print = "print" , i = 0 ; i < print . length ; i ++ , pos ++ )
76
- if ( input . get ( pos ) != print . charCodeAt ( i ) ) return
77
- let end = pos
78
- if ( / \w / . test ( String . fromCharCode ( input . get ( pos ) ) ) ) return
79
- for ( ; ; pos ++ ) {
80
- let next = input . get ( pos )
66
+ export const legacyPrint = new ExternalTokenizer ( input => {
67
+ for ( let i = 0 ; i < 5 ; i ++ ) {
68
+ if ( input . next != "print" . charCodeAt ( i ) ) return
69
+ input . advance ( )
70
+ }
71
+ if ( / \w / . test ( String . fromCharCode ( input . next ) ) ) return
72
+ for ( let off = 0 ; ; off ++ ) {
73
+ let next = input . peek ( off )
81
74
if ( next == space || next == tab ) continue
82
75
if ( next != parenOpen && next != dot && next != newline && next != carriageReturn && next != hash )
83
- token . accept ( printKeyword , end )
76
+ input . acceptToken ( printKeyword )
84
77
return
85
78
}
86
79
} )
0 commit comments