class VoightKampff::Test
Constants
- CRAWLERS_FILENAME
Attributes
user_agent_string[RW]
Public Class Methods
new(user_agent_string)
click to toggle source
# File lib/voight_kampff/test.rb, line 7
# Builds a test for the given user agent.
#
# user_agent_string - the raw User-Agent header value to classify.
def initialize(user_agent_string)
  @user_agent_string = user_agent_string
end
Public Instance Methods
agent()
click to toggle source
# File lib/voight_kampff/test.rb, line 11
# The crawler record matching the user agent, or an empty Hash when no
# crawler matched. Memoized per instance.
def agent
  return @agent unless @agent.nil?
  @agent = matching_crawler || {}
end
bot?()
click to toggle source
# File lib/voight_kampff/test.rb, line 19
# True when the user agent was identified as a known crawler.
# Aliased as replicant?.
def bot?
  !human?
end
Also aliased as: replicant?
human?()
click to toggle source
# File lib/voight_kampff/test.rb, line 15
# True when no crawler record matched the user agent (agent is empty).
def human?
  agent.empty?
end
Private Instance Methods
crawler_regexp()
click to toggle source
# File lib/voight_kampff/test.rb, line 46
# Compiles (once, shared across instances via the class variable) a single
# case-insensitive regexp that ORs every crawler pattern together. Each
# pattern is wrapped in a named group ("match0", "match1", ...) so a match
# can be traced back to its index in the crawlers list.
def crawler_regexp
  @@crawler_regexp ||= begin
    # NOTE: A bit convoluted, but matching one combined regexp is much
    # faster than testing each crawler pattern individually.
    named_groups = crawlers.each_with_index.map do |crawler, i|
      "(?<match#{i}>#{crawler["pattern"]})"
    end
    Regexp.new("(#{named_groups.join("|")})", Regexp::IGNORECASE)
  end
end
crawlers()
click to toggle source
# File lib/voight_kampff/test.rb, line 56
# Loads and memoizes (class-wide) the crawler definitions from the first
# crawlers file found on the lookup paths.
#
# Returns an Array of Hashes, one per known crawler.
def crawlers
  # File.read opens and closes the file itself; the previous
  # JSON.load(File.open(path, 'r')) form leaked an open file descriptor
  # until the File object was garbage collected.
  @@crawlers ||= JSON.load(File.read(preferred_path))
end
lookup_paths()
click to toggle source
# File lib/voight_kampff/test.rb, line 26
# Candidate locations for the crawlers config file.
#
# Returns an Array of Pathname-like objects, ordered by priority: a host
# Rails app's config directory (when Rails is loaded) wins over the gem's.
def lookup_paths
  roots = defined?(Rails) ? [Rails.root, VoightKampff.root] : [VoightKampff.root]
  roots.map { |root| root.join('config', CRAWLERS_FILENAME) }
end
matching_crawler()
click to toggle source
# File lib/voight_kampff/test.rb, line 39
# Finds the crawler record whose pattern matched the user agent string.
#
# Returns the crawler Hash, or nil when nothing matched.
def matching_crawler
  if match = crawler_regexp.match(@user_agent_string)
    # MatchData#names lists every named group in the regexp, not just the
    # one that captured, so `.first` would always yield "match0" and pin
    # the result to crawlers[0]. Find the group that actually matched.
    matched_name = match.names.find { |name| match[name] }
    index = matched_name.sub(/\Amatch/, '').to_i
    crawlers[index]
  end
end
preferred_path()
click to toggle source
# File lib/voight_kampff/test.rb, line 35
# Returns the first lookup path that exists on disk, or nil when none do.
def preferred_path
  # File.exists? was deprecated for years and removed in Ruby 3.2;
  # File.exist? is the supported spelling.
  lookup_paths.find { |path| File.exist? path }
end