require 'fileutils'
require 'ferret'
+require 'fastthread'
+
begin
require 'chronic'
$have_chronic = true
include Singleton
+ ## these two accessors should ONLY be used by single-threaded programs.
+ ## otherwise you will have a naughty ferret on your hands.
attr_reader :index
alias ferret index
+
def initialize dir=BASE_DIR
+ @index_mutex = Monitor.new
+
@dir = dir
@sources = {}
@sources_dirty = false
+ @source_mutex = Monitor.new
wsa = Ferret::Analysis::WhiteSpaceAnalyzer.new false
sa = Ferret::Analysis::StandardAnalyzer.new [], true
@lock_update_thread = nil
end
+ def possibly_pluralize number_of, kind
+ "#{number_of} #{kind}" +
+ if number_of == 1 then "" else "s" end
+ end
+
def fancy_lock_error_message_for e
- secs = Time.now - e.mtime
- mins = secs.to_i / 60
+ secs = (Time.now - e.mtime).to_i
+ mins = secs / 60
time =
if mins == 0
- "#{secs.to_i} seconds"
+ possibly_pluralize secs , "second"
else
- "#{mins} minutes"
+ possibly_pluralize mins, "minute"
end
<<EOS
end
def add_source source
- raise "duplicate source!" if @sources.include? source
- @sources_dirty = true
- max = @sources.max_of { |id, s| s.is_a?(DraftLoader) || s.is_a?(SentLoader) ? 0 : id }
- source.id ||= (max || 0) + 1
- ##source.id += 1 while @sources.member? source.id
- @sources[source.id] = source
+ @source_mutex.synchronize do
+ raise "duplicate source!" if @sources.include? source
+ @sources_dirty = true
+ max = @sources.max_of { |id, s| s.is_a?(DraftLoader) || s.is_a?(SentLoader) ? 0 : id }
+ source.id ||= (max || 0) + 1
+ ##source.id += 1 while @sources.member? source.id
+ @sources[source.id] = source
+ end
end
def sources
## favour the inbox by listing non-archived sources first
- @sources.values.sort_by { |s| s.id }.partition { |s| !s.archived? }.flatten
+ @source_mutex.synchronize { @sources.values }.sort_by { |s| s.id }.partition { |s| !s.archived? }.flatten
end
def source_for uri; sources.find { |s| s.is_source_for? uri }; end
def load_index dir=File.join(@dir, "ferret")
if File.exists? dir
Redwood::log "loading index..."
- @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
- Redwood::log "loaded index of #{@index.size} messages"
+ @index_mutex.synchronize do
+ @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+ Redwood::log "loaded index of #{@index.size} messages"
+ end
else
Redwood::log "creating index..."
- field_infos = Ferret::Index::FieldInfos.new :store => :yes
- field_infos.add_field :message_id
- field_infos.add_field :source_id
- field_infos.add_field :source_info
- field_infos.add_field :date, :index => :untokenized
- field_infos.add_field :body, :store => :no
- field_infos.add_field :label
- field_infos.add_field :subject
- field_infos.add_field :from
- field_infos.add_field :to
- field_infos.add_field :refs
- field_infos.add_field :snippet, :index => :no, :term_vector => :no
- field_infos.create_index dir
- @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+ @index_mutex.synchronize do
+ field_infos = Ferret::Index::FieldInfos.new :store => :yes
+ field_infos.add_field :message_id, :index => :untokenized
+ field_infos.add_field :source_id
+ field_infos.add_field :source_info
+ field_infos.add_field :date, :index => :untokenized
+ field_infos.add_field :body
+ field_infos.add_field :label
+ field_infos.add_field :attachments
+ field_infos.add_field :subject
+ field_infos.add_field :from
+ field_infos.add_field :to
+ field_infos.add_field :refs
+ field_infos.add_field :snippet, :index => :no, :term_vector => :no
+ field_infos.create_index dir
+ @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+ end
end
end
## and adding either way. Index state will be determined by m.labels.
##
## docid and entry can be specified if they're already known.
- def sync_message m, docid=nil, entry=nil
+ def sync_message m, docid=nil, entry=nil, opts={}
docid, entry = load_entry_for_id m.id unless docid && entry
raise "no source info for message #{m.id}" unless m.source && m.source_info
- raise "trying to delete non-corresponding entry #{docid} with index message-id #{@index[docid][:message_id].inspect} and parameter message id #{m.id.inspect}" if docid && @index[docid][:message_id] != m.id
+ @index_mutex.synchronize do
+ raise "trying to delete non-corresponding entry #{docid} with index message-id #{@index[docid][:message_id].inspect} and parameter message id #{m.id.inspect}" if docid && @index[docid][:message_id] != m.id
+ end
source_id =
if m.source.is_a? Integer
m.source.id or raise "unregistered source #{m.source} (id #{m.source.id.inspect})"
end
- to = (m.to + m.cc + m.bcc).map { |x| x.email }.join(" ")
snippet =
if m.snippet_contains_encrypted_content? && $config[:discard_snippets_from_encrypted_messages]
""
m.snippet
end
+ ## write the new document to the index. if the entry already exists in the
+ ## index, reuse it (which avoids having to reload the entry from the source,
+ ## which can be quite expensive for e.g. large threads of IMAP actions.)
+ ##
+ ## exception: if the index entry belongs to an earlier version of the
+ ## message, use everything from the new message instead, but union the
+ ## flags. this allows messages sent to mailing lists to have their header
+ ## updated and to have flags set properly.
+ ##
+ ## minor hack: messages in sources with lower ids have priority over
+ ## messages in sources with higher ids. so messages in the inbox will
+ ## override everyone, and messages in the sent box will be overridden
+ ## by everyone else.
+ ##
+ ## written in this manner to support previous versions of the index which
+ ## did not keep around the entry body. upgrading is thus seamless.
+ entry ||= {}
+ labels = m.labels.uniq # override because this is the new state, unless...
+
+ ## if we are a later version of a message, ignore what's in the index,
+ ## but merge in the labels.
+ if entry[:source_id] && entry[:source_info] && entry[:label] &&
+ ((entry[:source_id].to_i > source_id) || (entry[:source_info].to_i < m.source_info))
+ labels = (entry[:label].split(/\s+/).map { |l| l.intern } + m.labels).uniq
+ #Redwood::log "found updated version of message #{m.id}: #{m.subj}"
+ #Redwood::log "previous version was at #{entry[:source_id].inspect}:#{entry[:source_info].inspect}, this version at #{source_id.inspect}:#{m.source_info.inspect}"
+ #Redwood::log "merged labels are #{labels.inspect} (index #{entry[:label].inspect}, message #{m.labels.inspect})"
+ entry = {}
+ end
+
+ ## if force_overwrite is true, ignore what's in the index. this is used
+ ## primarily by sup-sync to force index updates.
+ entry = {} if opts[:force_overwrite]
+
d = {
:message_id => m.id,
:source_id => source_id,
:source_info => m.source_info,
- :date => m.date.to_indexable_s,
- :body => m.indexable_content,
- :snippet => snippet,
- :label => m.labels.uniq.join(" "),
- :from => m.from ? m.from.indexable_content : "",
+ :date => (entry[:date] || m.date.to_indexable_s),
+ :body => (entry[:body] || m.indexable_content),
+ :snippet => snippet, # always override
+ :label => labels.uniq.join(" "),
+ :attachments => (entry[:attachments] || m.attachments.uniq.join(" ")),
+
+ ## always override :from and :to.
+ ## older versions of Sup would often store the wrong thing in the index
+ ## (because they were canonicalizing email addresses, resulting in the
+ ## wrong name associated with each.) the correct address is read from
+ ## the original header when these messages are opened in thread-view-mode,
+ ## so this allows people to forcibly update the address in the index by
+ ## marking those threads for saving.
+ :from => (m.from ? m.from.indexable_content : ""),
:to => (m.to + m.cc + m.bcc).map { |x| x.indexable_content }.join(" "),
- :subject => wrap_subj(Message.normalize_subj(m.subj)),
- :refs => (m.refs + m.replytos).uniq.join(" "),
+
+ :subject => (entry[:subject] || wrap_subj(Message.normalize_subj(m.subj))),
+ :refs => (entry[:refs] || (m.refs + m.replytos).uniq.join(" ")),
}
- @index.delete docid if docid
- @index.add_document d
-
+ @index_mutex.synchronize do
+ @index.delete docid if docid
+ @index.add_document d
+ end
+
docid, entry = load_entry_for_id m.id
## this hasn't been triggered in a long time. TODO: decide whether it's still a problem.
raise "just added message #{m.id.inspect} but couldn't find it in a search" unless docid
end
def contains_id? id
- @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0
+ @index_mutex.synchronize { @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0 }
end
- def contains? m; contains_id? m.id; end
- def size; @index.size; end
+ def contains? m; contains_id? m.id end
+ def size; @index_mutex.synchronize { @index.size } end
+ def empty?; size == 0 end
## you should probably not call this on a block that doesn't break
## rather quickly because the results can be very large.
EACH_BY_DATE_NUM = 100
def each_id_by_date opts={}
- return if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
+ return if empty? # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
query = build_query opts
offset = 0
while true
- results = @index.search(query, :sort => "date DESC", :limit => EACH_BY_DATE_NUM, :offset => offset)
+ limit = (opts[:limit])? [EACH_BY_DATE_NUM, opts[:limit] - offset].min : EACH_BY_DATE_NUM
+ results = @index_mutex.synchronize { @index.search query, :sort => "date DESC", :limit => limit, :offset => offset }
Redwood::log "got #{results.total_hits} results for query (offset #{offset}) #{query.inspect}"
- results.hits.each { |hit| yield @index[hit.doc][:message_id], lambda { build_message hit.doc } }
- break if offset >= results.total_hits - EACH_BY_DATE_NUM
- offset += EACH_BY_DATE_NUM
+ results.hits.each do |hit|
+ yield @index_mutex.synchronize { @index[hit.doc][:message_id] }, lambda { build_message hit.doc }
+ end
+ break if opts[:limit] and offset >= opts[:limit] - limit
+ break if offset >= results.total_hits - limit
+ offset += limit
end
end
def num_results_for opts={}
- return 0 if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
+ return 0 if empty? # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
q = build_query opts
- index.search(q, :limit => 1).total_hits
+ @index_mutex.synchronize { @index.search(q, :limit => 1).total_hits }
end
## yield all messages in the thread containing 'm' by repeatedly
searched = {}
num_queries = 0
+ pending = [m.id]
if $config[:thread_by_subject] # do subject queries
date_min = m.date - (SAME_SUBJECT_DATE_LIMIT * 12 * 3600)
date_max = m.date + (SAME_SUBJECT_DATE_LIMIT * 12 * 3600)
q = build_query :qobj => q
- pending = @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] }
- Redwood::log "found #{pending.size} results for subject query #{q}"
- else
- pending = [m.id]
+ p1 = @index_mutex.synchronize { @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] } }
+ Redwood::log "found #{p1.size} results for subject query #{q}"
+
+ p2 = @index_mutex.synchronize { @index.search(q.to_s, :limit => :all).hits.map { |hit| @index[hit.doc][:message_id] } }
+ Redwood::log "found #{p2.size} results in string form"
+
+ pending = (pending + p1 + p2).uniq
end
until pending.empty? || (opts[:limit] && messages.size >= opts[:limit])
num_queries += 1
killed = false
- @index.search_each(q, :limit => :all) do |docid, score|
- break if opts[:limit] && messages.size >= opts[:limit]
- if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed]
- killed = true
- break
- end
- mid = @index[docid][:message_id]
- unless messages.member?(mid)
- #Redwood::log "got #{mid} as a child of #{id}"
- messages[mid] ||= lambda { build_message docid }
- refs = @index[docid][:refs].split(" ")
- pending += refs.select { |id| !searched[id] }
+ @index_mutex.synchronize do
+ @index.search_each(q, :limit => :all) do |docid, score|
+ break if opts[:limit] && messages.size >= opts[:limit]
+ if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed]
+ killed = true
+ break
+ end
+ mid = @index[docid][:message_id]
+ unless messages.member?(mid)
+ #Redwood::log "got #{mid} as a child of #{id}"
+ messages[mid] ||= lambda { build_message docid }
+ refs = @index[docid][:refs].split(" ")
+ pending += refs.select { |id| !searched[id] }
+ end
end
end
end
## builds a message object from a ferret result
def build_message docid
- doc = @index[docid]
- source = @sources[doc[:source_id].to_i]
- #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})"
- raise "invalid source #{doc[:source_id]}" unless source
-
- fake_header = {
- "date" => Time.at(doc[:date].to_i),
- "subject" => unwrap_subj(doc[:subject]),
- "from" => doc[:from],
- "to" => doc[:to].split(/\s+/).join(", "), # reformat
- "message-id" => doc[:message_id],
- "references" => doc[:refs].split(/\s+/).map { |x| "<#{x}>" }.join(" "),
- }
-
- Message.new :source => source, :source_info => doc[:source_info].to_i,
- :labels => doc[:label].split(" ").map { |s| s.intern },
- :snippet => doc[:snippet], :header => fake_header
+ @index_mutex.synchronize do
+ doc = @index[docid]
+
+ source = @source_mutex.synchronize { @sources[doc[:source_id].to_i] }
+ raise "invalid source #{doc[:source_id]}" unless source
+
+ #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})"
+
+ fake_header = {
+ "date" => Time.at(doc[:date].to_i),
+ "subject" => unwrap_subj(doc[:subject]),
+ "from" => doc[:from],
+ "to" => doc[:to].split(/\s+/).join(", "), # reformat
+ "message-id" => doc[:message_id],
+ "references" => doc[:refs].split(/\s+/).map { |x| "<#{x}>" }.join(" "),
+ }
+
+ Message.new :source => source, :source_info => doc[:source_info].to_i,
+ :labels => doc[:label].split(" ").map { |s| s.intern },
+ :snippet => doc[:snippet], :header => fake_header
+ end
end
def fresh_thread_id; @next_thread_id += 1; end
def wrap_subj subj; "__START_SUBJECT__ #{subj} __END_SUBJECT__"; end
def unwrap_subj subj; subj =~ /__START_SUBJECT__ (.*?) __END_SUBJECT__/ && $1; end
- def drop_entry docno; @index.delete docno; end
+ def drop_entry docno; @index_mutex.synchronize { @index.delete docno } end
def load_entry_for_id mid
- results = @index.search(Ferret::Search::TermQuery.new(:message_id, mid))
- return if results.total_hits == 0
- docid = results.hits[0].doc
- [docid, @index[docid]]
+ @index_mutex.synchronize do
+ results = @index.search Ferret::Search::TermQuery.new(:message_id, mid)
+ return if results.total_hits == 0
+ docid = results.hits[0].doc
+ entry = @index[docid]
+ entry_dup = entry.fields.inject({}) { |h, f| h[f] = entry[f]; h }
+ [docid, entry_dup]
+ end
end
def load_contacts emails, h={}
Redwood::log "contact search: #{q}"
contacts = {}
num = h[:num] || 20
- @index.search_each(q, :sort => "date DESC", :limit => :all) do |docid, score|
- break if contacts.size >= num
- #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}"
- f = @index[docid][:from]
- t = @index[docid][:to]
-
- if AccountManager.is_account_email? f
- t.split(" ").each { |e| contacts[PersonManager.person_for(e)] = true }
- else
- contacts[PersonManager.person_for(f)] = true
+ @index_mutex.synchronize do
+ @index.search_each q, :sort => "date DESC", :limit => :all do |docid, score|
+ break if contacts.size >= num
+ #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}"
+ f = @index[docid][:from]
+ t = @index[docid][:to]
+
+ if AccountManager.is_account_email? f
+ t.split(" ").each { |e| contacts[Person.from_address(e)] = true }
+ else
+ contacts[Person.from_address(f)] = true
+ end
end
end
def load_sources fn=Redwood::SOURCE_FN
source_array = (Redwood::load_yaml_obj(fn) || []).map { |o| Recoverable.new o }
- @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten]
- @sources_dirty = false
+ @source_mutex.synchronize do
+ @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten]
+ @sources_dirty = false
+ end
end
def has_any_from_source_with_label? source, label
q = Ferret::Search::BooleanQuery.new
q.add_query Ferret::Search::TermQuery.new("source_id", source.id.to_s), :must
q.add_query Ferret::Search::TermQuery.new("label", label.to_s), :must
- index.search(q, :limit => 1).total_hits > 0
+ @index_mutex.synchronize { @index.search(q, :limit => 1).total_hits > 0 }
end
protected
def parse_user_query_string s
extraopts = {}
- ## this is a little hacky, but it works, at least until ferret changes
- ## its api. we parse the user query string with ferret twice: the first
- ## time we just turn the resulting object back into a string, which has
- ## the next effect of transforming the original string into a nice
- ## normalized form with + and - instead of AND, OR, etc. then we do some
- ## string substitutions which depend on this normalized form, re-parse
- ## the string with Ferret, and return the resulting query object.
-
- norms = @qparser.parse(s).to_s
- Redwood::log "normalized #{s.inspect} to #{norms.inspect}" unless s == norms
-
- subs = norms.gsub(/\b(to|from):(\S+)\b/) do
+ subs = s.gsub(/\b(to|from):(\S+)\b/) do
field, name = $1, $2
if(p = ContactManager.contact_for(name))
[field, p.email]
extraopts[:load_deleted] = true if subs =~ /\blabel:deleted\b/
## gmail style "is" operator
- subs = subs.gsub(/\b(is):(\S+)\b/) do
+ subs = subs.gsub(/\b(is|has):(\S+)\b/) do
field, label = $1, $2
case label
when "read"
end
end
+ ## gmail style attachments "filename" and "filetype" searches
+ subs = subs.gsub(/\b(filename|filetype):(\((.+?)\)\B|(\S+)\b)/) do
+ field, name = $1, ($3 || $4)
+ case field
+ when "filename"
+ Redwood::log "filename - translated #{field}:#{name} to attachments:(#{name.downcase})"
+ "attachments:(#{name.downcase})"
+ when "filetype"
+ Redwood::log "filetype - translated #{field}:#{name} to attachments:(*.#{name.downcase})"
+ "attachments:(*.#{name.downcase})"
+ end
+ end
+
if $have_chronic
chronic_failure = false
subs = subs.gsub(/\b(before|on|in|during|after):(\((.+?)\)\B|(\S+)\b)/) do
end
subs = nil if chronic_failure
end
+
+ ## limit:42 restrict the search to 42 results
+ subs = subs.gsub(/\blimit:(\S+)\b/) do
+ lim = $1
+ if lim =~ /^\d+$/
+ extraopts[:limit] = lim.to_i
+ ''
+ else
+ BufferManager.flash "Can't understand limit #{lim.inspect}!"
+ subs = nil
+ end
+ end
- Redwood::log "translated #{norms.inspect} to #{subs.inspect}" unless subs == norms
if subs
[@qparser.parse(subs), extraopts]
else
end
def save_sources fn=Redwood::SOURCE_FN
- if @sources_dirty || @sources.any? { |id, s| s.dirty? }
- bakfn = fn + ".bak"
- if File.exists? fn
+ @source_mutex.synchronize do
+ if @sources_dirty || @sources.any? { |id, s| s.dirty? }
+ bakfn = fn + ".bak"
+ if File.exists? fn
+ File.chmod 0600, fn
+ FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0
+ end
+ Redwood::save_yaml_obj sources.sort_by { |s| s.id.to_i }, fn, true
File.chmod 0600, fn
- FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0
end
- Redwood::save_yaml_obj sources.sort_by { |s| s.id.to_i }, fn, true
- File.chmod 0600, fn
+ @sources_dirty = false
end
- @sources_dirty = false
end
end