debian-mirror-gitlab/lib/gitlab/ci/pipeline/expression/lexer.rb

# frozen_string_literal: true

module Gitlab
  module Ci
    module Pipeline
      module Expression
        class Lexer
          include ::Gitlab::Utils::StrongMemoize

          SyntaxError = Class.new(Expression::ExpressionError)

          # Lexemes recognised when the complex expressions feature flag is off.
          LEXEMES = [
            Expression::Lexeme::Variable,
            Expression::Lexeme::String,
            Expression::Lexeme::Pattern,
            Expression::Lexeme::Null,
            Expression::Lexeme::Equals,
            Expression::Lexeme::Matches,
            Expression::Lexeme::NotEquals,
            Expression::Lexeme::NotMatches
          ].freeze

          # Superset of LEXEMES that also recognises the And/Or operators,
          # enabled behind the ci_variables_complex_expressions feature flag.
          NEW_LEXEMES = [
            Expression::Lexeme::Variable,
            Expression::Lexeme::String,
            Expression::Lexeme::Pattern,
            Expression::Lexeme::Null,
            Expression::Lexeme::Equals,
            Expression::Lexeme::Matches,
            Expression::Lexeme::NotEquals,
            Expression::Lexeme::NotMatches,
            Expression::Lexeme::And,
            Expression::Lexeme::Or
          ].freeze

          # Upper bound on the number of tokens scanned before giving up.
          MAX_TOKENS = 100

          def initialize(statement, max_tokens: MAX_TOKENS)
            @scanner = StringScanner.new(statement)
            @max_tokens = max_tokens
          end

          def tokens
            strong_memoize(:tokens) { tokenize }
          end

          def lexemes
            tokens.map(&:to_lexeme)
          end

          private

          def tokenize
            tokens = []

            @max_tokens.times do
              @scanner.skip(/\s+/) # ignore whitespace

              return tokens if @scanner.eos?

              # The first lexeme type whose pattern matches at the scanner's
              # current position produces the next token.
              lexeme = available_lexemes.find do |type|
                type.scan(@scanner).tap do |token|
                  tokens.push(token) if token.present?
                end
              end

              unless lexeme.present?
                raise Lexer::SyntaxError, 'Unknown lexeme found!'
              end
            end

            raise Lexer::SyntaxError, 'Too many tokens!'
          end

          def available_lexemes
            Feature.enabled?(:ci_variables_complex_expressions) ? NEW_LEXEMES : LEXEMES
          end
        end
      end
    end
  end
end
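
A minimal usage sketch, assuming the surrounding GitLab code is loaded; the statement string is hypothetical and only illustrates the lexemes listed above:

  lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "value"')

  lexer.tokens   # memoized array of tokens built by the matching lexeme classes
  lexer.lexemes  # the same tokens mapped through #to_lexeme

  # Unrecognised input raises Lexer::SyntaxError ('Unknown lexeme found!');
  # scanning more than MAX_TOKENS tokens raises 'Too many tokens!'.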