/ common / fingerprints / parser / tokenstrem.go
tokenstrem.go
 1  // Copyright (c) 2024-2026 Tencent Zhuque Lab. All rights reserved.
 2  //
 3  // Licensed under the Apache License, Version 2.0 (the "License");
 4  // you may not use this file except in compliance with the License.
 5  // You may obtain a copy of the License at
 6  //
 7  //     http://www.apache.org/licenses/LICENSE-2.0
 8  //
 9  // Unless required by applicable law or agreed to in writing, software
10  // distributed under the License is distributed on an "AS IS" BASIS,
11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  // See the License for the specific language governing permissions and
13  // limitations under the License.
14  //
15  // Requirement: Any integration or derivative work must explicitly attribute
16  // Tencent Zhuque Lab (https://github.com/Tencent/AI-Infra-Guard) in its
17  // documentation or user interface, as detailed in the NOTICE file.
18  
19  // Package parser 实现词法分析栈结构
20  package parser
21  
22  import "errors"
23  
24  // TokenStream represents a stream of tokens that can be traversed
25  // 表示一个可以遍历的 token 流
// tokenStream represents a sequential, rewindable stream of tokens
// produced by the lexer. The zero value is not useful; construct
// instances with newTokenStream.
type tokenStream struct {
	tokens      []Token // slice of tokens to process
	index       int     // current read position in the stream
	tokenLength int     // cached total number of tokens (len(tokens))
}
31  
32  // newTokenStream creates a new token stream from a slice of tokens
33  // 从token切片创建新的token流
34  func newTokenStream(tokens []Token) *tokenStream {
35  	ret := new(tokenStream)
36  	ret.tokens = tokens
37  	ret.tokenLength = len(tokens)
38  	return ret
39  }
40  
41  // rewind moves the current position back by one
42  // 将当前位置回退一步
43  func (ts *tokenStream) rewind() {
44  	ts.index -= 1
45  }
46  
47  // next returns the next token in the stream and advances the position
48  // 返回流中的下一个token并前进位置
49  func (ts *tokenStream) next() (Token, error) {
50  	// Fix the logic error: check bounds before accessing token
51  	if ts.index >= len(ts.tokens) {
52  		return Token{}, errors.New("token index great token's length")
53  	}
54  	token := ts.tokens[ts.index]
55  	ts.index += 1
56  	return token, nil
57  }
58  
59  // hasNext checks if there are more tokens available in the stream
60  // 检查流中是否还有更多token可用
61  func (ts tokenStream) hasNext() bool {
62  	return ts.index < ts.tokenLength
63  }