Compare commits
3 Commits
Author | SHA1 | Date |
---|---|---|
Jeff Yates | e2aaa7a2b5 | |
Jeff Yates | fa39e4d040 | |
Jeff Yates | ddb4003e66 | |
sorter.rb
@@ -148,7 +148,7 @@ def process_file (file_name, binfile, type)
   sections.delete_at(0) #we can ignore the first chunk of text
   sections.each do |chunk|
     timestamp = chunk.lines.delete_if {|line| line == "\r\n"}[0] #pulling out the timestamp
-    timestamp.tr!('/','-').tr!(':','').tr!(' ','_') #remove slashes and colons from timestamp, replaces spaces with unserscores
+    timestamp.tr!('/','-').tr!(':','').tr!(' ','_') #remove slashes and colons from timestamp, replaces spaces with underscores
     timestamp.strip!
     output = Hash.new #Creating the output storage object
     outfile = file_name + '_' + timestamp
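The `tr!` chain rewrites the timestamp so it can be used in a file name. An illustrative example (the input value is made up, not taken from the repository):

    timestamp = "01/02/2024 13:45:07"
    timestamp.tr!('/','-').tr!(':','').tr!(' ','_')
    # timestamp is now "01-02-2024_134507": slashes become dashes,
    # colons are dropped, and the space becomes an underscore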
@@ -161,7 +161,10 @@ def process_file (file_name, binfile, type)
       output[key] = Hash.new
       output[key][:words] = bin_counter(bins[bin_number], text)
       output[key][:total] = count_total(output[key])
-      output[key][:frequency] = output[key][:total].to_f / output[:total_words].to_f
+      output[key][:frequency] = 0
+      #output[key][:frequency] = output[key][:total].to_f / output[:total_words].to_f if output[[:total_words] != 0
+      freq = output[key][:total].to_f / output[:total_words].to_f
+      output[key][:frequency] = freq.to_s
     end
     write_output_json(output,outfile + '-out.json')
     write_output_csv(output,outfile + '-out.csv')
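This hunk replaces the direct frequency assignment with a 0 default followed by a string-valued computation; the commented-out line suggests the intent was to skip the division when `output[:total_words]` is zero. A minimal sketch of that guard, assuming the same hash layout (illustrative only, not code from the commit):

    # Keep the 0 default when there are no words;
    # otherwise store the ratio as a string.
    if output[:total_words].to_i != 0
      freq = output[key][:total].to_f / output[:total_words].to_f
      output[key][:frequency] = freq.to_s
    end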
@@ -176,11 +179,15 @@ end
#
#This method will process all .txt files in the supplied directory
def process_dir(dir_name, binfile, type)
  threads = []
  Dir.glob(dir_name + '*.txt') do |file_name|
    threads << Thread.new do
      puts "Processing " + file_name
      process_file(file_name, binfile, type)
    end
  end
  threads.each { |thr| thr.join }
end

def generate_master_output(dir_name, binfile)
  file=File.open(binfile,"r")
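For reference, `process_dir` spawns one thread per `.txt` file and waits for all of them with `Thread#join`. A minimal invocation sketch (the argument values are hypothetical, and the directory path must end with a separator because the glob is built as `dir_name + '*.txt'`):

    # Hypothetical call: process every .txt file under transcripts/ using bins.txt;
    # the type argument value is made up, adjust to the actual repository usage.
    process_dir('transcripts/', 'bins.txt', 'json')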