Commit 26167c24 authored by Grzegorz Bizon

Improve pipeline expressions lexer

parent 886988c9
@@ -3,6 +3,8 @@ module Gitlab
     module Pipeline
       module Expression
         class Lexer
+          include ::Gitlab::Utils::StrongMemoize
+
           LEXEMES = [
             Expression::Lexeme::Variable,
             Expression::Lexeme::String,
@@ -10,34 +12,44 @@ module Gitlab
             Expression::Lexeme::Equals
           ].freeze
 
-          MAX_CYCLES = 5
-
           SyntaxError = Class.new(Statement::StatementError)
 
+          MAX_TOKENS = 100
+
           def initialize(statement)
             @scanner = StringScanner.new(statement)
             @tokens = []
           end
 
-          def tokens
-            return @tokens if @tokens.any?
-
-            MAX_CYCLES.times do
-              LEXEMES.each do |lexeme|
-                @scanner.skip(/\s+/) # ignore whitespace
-
-                lexeme.scan(@scanner).tap do |token|
-                  @tokens.push(token) if token.present?
-                end
-
-                return @tokens if @scanner.eos?
-              end
-            end
-
-            raise Lexer::SyntaxError unless @scanner.eos?
+          def tokens(max: MAX_TOKENS)
+            strong_memoize(:tokens) { tokenize(max) }
           end
 
           def lexemes
             tokens.map(&:to_lexeme)
           end
+
+          private
+
+          def tokenize(max_tokens)
+            tokens = []
+
+            max_tokens.times do
+              @scanner.skip(/\s+/) # ignore whitespace
+
+              return tokens if @scanner.eos?
+
+              lexeme = LEXEMES.find do |type|
+                type.scan(@scanner).tap do |token|
+                  tokens.push(token) if token.present?
+                end
+              end
+
+              unless lexeme.present?
+                raise Lexer::SyntaxError, 'Unknown lexeme found!'
+              end
+            end
+
+            raise Lexer::SyntaxError, 'Too many tokens!'
+          end
         end
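
For context, this is roughly how the reworked lexer reads from calling code. A minimal sketch, assuming the GitLab application environment is loaded (for example in a Rails console) so the Expression classes resolve; the statement and the values in the comments are illustrative:

lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "text"')

begin
  tokens = lexer.tokens   # memoized; scans at most MAX_TOKENS (100) tokens by default
  tokens.map(&:value)     # raw token values, e.g. '$VARIABLE', '==', '"text"'
  lexer.lexemes           # one lexeme name per token, via Token#to_lexeme
rescue Gitlab::Ci::Pipeline::Expression::Lexer::SyntaxError => e
  # raised either when an unknown lexeme is encountered or when the statement
  # produces more than the allowed number of tokens
  e.message
end

Compared to the previous MAX_CYCLES loop over all lexeme classes, the new tokenize method scans one token per iteration, distinguishes an unknown lexeme from an overly long statement, and lets callers pass an explicit cap with tokens(max: ...).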
@@ -45,10 +45,10 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
       expect(tokens.third.value).to eq '"text"'
     end
 
-    it 'limits statement to 5 tokens' do
+    it 'limits statement to specified amount of tokens' do
       lexer = described_class.new("$V1 $V2 $V3 $V4 $V5 $V6")
 
-      expect { lexer.tokens }
+      expect { lexer.tokens(max: 5) }
         .to raise_error described_class::SyntaxError
     end
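
One behavioural detail worth keeping in mind when reading this spec: the token list is memoized per Lexer instance through StrongMemoize, so the cap passed to the first tokens call is the one that takes effect. A rough illustration, under the same assumptions as the sketch above:

lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$V1 $V2 $V3')

lexer.tokens(max: 5)  # tokenizes with the explicit cap and memoizes the result
lexer.tokens          # returns the memoized tokens; the default cap is not applied again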