# frozen_string_literal: true

module Gitlab
  module Ci
    module Pipeline
      module Expression
        # Tokenizes a CI pipeline expression statement into a stream of
        # tokens by repeatedly matching lexeme classes against a
        # StringScanner positioned over the raw statement text.
        class Lexer
          include ::Gitlab::Utils::StrongMemoize

          # Raised when the statement contains input no lexeme matches, or
          # when the number of tokens exceeds the configured maximum.
          SyntaxError = Class.new(Expression::ExpressionError)

          # Recognized lexeme classes. Order matters: `tokenize` tries each
          # class in turn and the first one whose pattern matches at the
          # scanner's current position wins.
          LEXEMES = [
            Expression::Lexeme::ParenthesisOpen,
            Expression::Lexeme::ParenthesisClose,
            Expression::Lexeme::Variable,
            Expression::Lexeme::String,
            Expression::Lexeme::Pattern,
            Expression::Lexeme::Null,
            Expression::Lexeme::Equals,
            Expression::Lexeme::Matches,
            Expression::Lexeme::NotEquals,
            Expression::Lexeme::NotMatches,
            Expression::Lexeme::And,
            Expression::Lexeme::Or
          ].freeze

          # Lexeme classes this lexer recognizes, in match-priority order.
          def self.lexemes
            LEXEMES
          end

          # Upper bound on tokens per statement; prevents unbounded scanning.
          MAX_TOKENS = 100

          # @param statement [String] raw expression text to tokenize
          # @param max_tokens [Integer] token budget before raising SyntaxError
          def initialize(statement, max_tokens: MAX_TOKENS)
            @scanner = StringScanner.new(statement)
            @max_tokens = max_tokens
          end

          # Memoized token stream for the statement.
          #
          # @return [Array] tokens produced by the lexeme classes
          # @raise [Lexer::SyntaxError] on unknown input or token overflow
          def tokens
            strong_memoize(:tokens) { tokenize }
          end

          # The lexeme class of each token, in statement order.
          def lexemes
            tokens.map(&:to_lexeme)
          end

          private

          def tokenize
            tokens = []

            # Iterating at most @max_tokens times bounds the scan; falling
            # out of the loop without reaching EOS means the budget was hit.
            @max_tokens.times do
              @scanner.skip(/\s+/) # ignore whitespace

              return tokens if @scanner.eos?

              # `find` stops at the first lexeme whose `scan` returns a
              # truthy token; the token is collected as a side effect via
              # `tap` before `find` evaluates it.
              lexeme = self.class.lexemes.find do |type|
                type.scan(@scanner).tap do |token|
                  tokens.push(token) if token.present?
                end
              end

              unless lexeme.present?
                raise Lexer::SyntaxError, 'Unknown lexeme found!'
              end
            end

            raise Lexer::SyntaxError, 'Too many tokens!'
          end
        end
      end
    end
  end
end