SHIBATA Hiroshi
null****@okkez*****
2014年 3月 6日 (木) 19:32:56 JST
SHIBATA Hiroshi 2014-03-06 19:32:56 +0900 (Thu, 06 Mar 2014) New Revision: b48d9788b290bdcd010fd8aacd80624bcb4a429f https://github.com/hiki/hiki/commit/b48d9788b290bdcd010fd8aacd80624bcb4a429f Message: transpec Modified files: spec/50bayes_filter_spec.rb spec/bayes_filter_spec.rb spec/filter_spec.rb Modified: spec/50bayes_filter_spec.rb (+54 -54) =================================================================== --- spec/50bayes_filter_spec.rb 2014-03-06 19:32:32 +0900 (b32ece2) +++ spec/50bayes_filter_spec.rb 2014-03-06 19:32:56 +0900 (b8e166d) @@ -24,7 +24,7 @@ class << Object.new @base_url = "http://www.example.org/hiki.cgi" @opt = {} - @conf = stub("conf", + @conf = double("conf", data_path:@tmpdir, cache_path:"#{@tmpdir}/cache", bayes_threshold:nil, @@ -32,8 +32,8 @@ class << Object.new cgi_name:@base_url, index_url:@base_url, null_object:false) - @conf.should_receive("[]".intern).any_number_of_times{|k| @opt[k]} - @conf.should_receive("[]=".intern).any_number_of_times{|k, v| @opt[k]=v} + allow(@conf).to receive("[]".intern){|k| @opt[k]} + allow(@conf).to receive("[]=".intern){|k, v| @opt[k]=v} BayesFilter.init(@conf) end @@ -43,12 +43,12 @@ class << Object.new ex.before do @params = Hash.new{|h, k| h[k]=[]} - @cgi = stub("cgi", + @cgi = double("cgi", params:@params, request_method:"POST", null_object:false) @pages = [] - @db = stub("db", + @db = double("db", pages:@pages, null_object:false) @c = BayesFilterConfig.new(@cgi, @conf, "saveconf", @db) @@ -65,7 +65,7 @@ class << Object.new end [EN].each do |m| - JA::BayesFilterConfig::Res.methods.sort.should == m::BayesFilterConfig::Res.methods.sort + expect(JA::BayesFilterConfig::Res.methods.sort).to eq(m::BayesFilterConfig::Res.methods.sort) end end end @@ -74,22 +74,22 @@ class << Object.new include Common it "html" do - lambda{@c.html}.should_not raise_error + expect{@c.html}.not_to raise_error end it "conf_url" do - @c.conf_url.should == "#{@base_url}?c=admin;conf=bayes_filter" - @c.conf_url("hoge").should == 
"#{@base_url}?c=admin;conf=bayes_filter;bfmode=hoge" + expect(@c.conf_url).to eq("#{@base_url}?c=admin;conf=bayes_filter") + expect(@c.conf_url("hoge")).to eq("#{@base_url}?c=admin;conf=bayes_filter;bfmode=hoge") end it "save_mode?" do - @c.save_mode?.should be_true - @cgi.stub!(:request_method).and_return("GET") - @c.save_mode?.should be_false - @cgi.stub!(:request_method).and_return("POST") - @c.save_mode?.should be_true + expect(@c.save_mode?).to be_true + allow(@cgi).to receive(:request_method).and_return("GET") + expect(@c.save_mode?).to be_false + allow(@cgi).to receive(:request_method).and_return("POST") + expect(@c.save_mode?).to be_true @c.instance_variable_set("@confmode", "conf") - @c.save_mode?.should be_false + expect(@c.save_mode?).to be_false end end @@ -112,9 +112,9 @@ class << Object.new end it "setting test" do - @ham.ham?.should be_true - @spam.ham?.should be_false - @doubt.ham?.should be_nil + expect(@ham.ham?).to be_true + expect(@spam.ham?).to be_false + expect(@doubt.ham?).to be_nil end it "html" do @@ -123,12 +123,12 @@ class << Object.new it "submitted_pages" do l =****@c*****_pages - l.ham.values.map{|i| i.cache_file_name}.should == [@ham.cache_file_name] - l.ham.each_pair{|k, d| k.should == d.cache_file_name[/H\d+$/]} - l.spam.values.map{|i| i.cache_file_name}.should == [@spam.cache_file_name] - l.spam.each_pair{|k, d| k.should == d.cache_file_name[/S\d+$/]} - l.doubt.values.map{|i| i.cache_file_name}.should == [@doubt.cache_file_name] - l.doubt.each_pair{|k, d| k.should == d.cache_file_name[/D\d+$/]} + expect(l.ham.values.map{|i| i.cache_file_name}).to eq([@ham.cache_file_name]) + l.ham.each_pair{|k, d| expect(k).to eq(d.cache_file_name[/H\d+$/])} + expect(l.spam.values.map{|i| i.cache_file_name}).to eq([@spam.cache_file_name]) + l.spam.each_pair{|k, d| expect(k).to eq(d.cache_file_name[/S\d+$/])} + expect(l.doubt.values.map{|i| i.cache_file_name}).to eq([@doubt.cache_file_name]) + l.doubt.each_pair{|k, d| expect(k).to 
eq(d.cache_file_name[/D\d+$/])} end end @@ -144,29 +144,29 @@ class << Object.new bp = Hiki::Filter::BayesFilter::PageData @ham = bp.new(pd.new("ham", "ham", "ham", "ham", "127.0.0.1")) @ham.cache_save - @ham.ham?.should be_true + expect(@ham.ham?).to be_true @spam = bp.new(pd.new("spam", "spam", "spam", "spam", "127.0.0.1")) @spam.cache_save - @spam.ham?.should be_false + expect(@spam.ham?).to be_false @doubt = bp.new(pd.new("ham spam", "ham spam", "ham spam", "ham spam", "127.0.0.1")) @doubt.cache_save - @doubt.ham?.should be_nil + expect(@doubt.ham?).to be_nil end it "html" do - @c.should_receive(:process_page_data){@c.proxied_by_rspec__process_page_data} - lambda{@c.html}.should_not raise_error + expect(@c).to receive(:process_page_data){@c.proxied_by_rspec__process_page_data} + expect{@c.html}.not_to raise_error end it "process data" do ham_id = "H#{@ham.file_name}" @params[ham_id] << "1" @params["register_#{ham_id}"] << "spam" - @c.save_mode?.should be_true + expect(@c.save_mode?).to be_true @c.process_page_data - File.should_not be_exist(@ham.cache_file_name) - File.should be_exist(@ham.corpus_file_name_spam) - @ham.ham?.should be_false + expect(File).not_to be_exist(@ham.cache_file_name) + expect(File).to be_exist(@ham.corpus_file_name_spam) + expect(@ham.ham?).to be_false end end @@ -179,15 +179,15 @@ class << Object.new @filter_db = Bayes::PaulGraham.new @filter_db.spam << @token @filter_db.ham << @token - BayesFilter.stub!(:db).and_return(@filter_db) + allow(BayesFilter).to receive(:db).and_return(@filter_db) end it "should occur infinity-loop at #add_ham" do - lambda{@c.add_ham(@token)}.should_not raise_error + expect{@c.add_ham(@token)}.not_to raise_error end it "should occur infinity-loop at #add_spam" do - lambda{@c.add_spam(@token)}.should_not raise_error + expect{@c.add_spam(@token)}.not_to raise_error end end @@ -200,15 +200,15 @@ class << Object.new @filter_db = Bayes::PaulGraham.new @filter_db.spam << @token @filter_db.ham << @token - 
BayesFilter.stub!(:db).and_return(@filter_db) + allow(BayesFilter).to receive(:db).and_return(@filter_db) end it "should occur infinity-loop at #add_ham" do - lambda{@c.add_ham(@token)}.should_not raise_error + expect{@c.add_ham(@token)}.not_to raise_error end it "should occur infinity-loop at #add_spam" do - lambda{@c.add_spam(@token)}.should_not raise_error + expect{@c.add_spam(@token)}.not_to raise_error end end @@ -217,14 +217,14 @@ class << Object.new it "html(ham)" do @params["bfmode"] << BayesFilterConfig::Mode::HAM_TOKENS - @c.should_receive(:tokens_html){|token, title| @c.proxied_by_rspec__tokens_html(token, title)} - lambda{@c.html}.should_not raise_error + expect(@c).to receive(:tokens_html){|token, title| @c.proxied_by_rspec__tokens_html(token, title)} + expect{@c.html}.not_to raise_error end it "html(spam)" do @params["bfmode"] << BayesFilterConfig::Mode::SPAM_TOKENS - @c.should_receive(:tokens_html){|token, title| @c.proxied_by_rspec__tokens_html(token, title)} - lambda{@c.html}.should_not raise_error + expect(@c).to receive(:tokens_html){|token, title| @c.proxied_by_rspec__tokens_html(token, title)} + expect{@c.html}.not_to raise_error end end @@ -240,14 +240,14 @@ class << Object.new end it "html" do - @c.should_receive(:submitted_page_diff_html) - lambda{@c.html}.should_not raise_error + expect(@c).to receive(:submitted_page_diff_html) + expect{@c.html}.not_to raise_error end it "submitted_page_diff_html" do $SAFE=1 - @c.should_receive(:word_diff) - lambda{@c.submitted_page_diff_html}.should_not raise_error + expect(@c).to receive(:word_diff) + expect{@c.submitted_page_diff_html}.not_to raise_error end end @@ -262,13 +262,13 @@ class << Object.new end it "html" do - @c.should_receive(:page_token_html) - lambda{@c.html}.should_not raise_error + expect(@c).to receive(:page_token_html) + expect{@c.html}.not_to raise_error end it "submitted_page_diff_html" do $SAFE=1 - lambda{@c.page_token_html}.should_not raise_error + 
expect{@c.page_token_html}.not_to raise_error end end @@ -280,11 +280,11 @@ class << Object.new Hiki::Filter::BayesFilter::PageData.new(pd.new("HamPage", "text")).corpus_save(true) Hiki::Filter::BayesFilter::PageData.new(pd.new("SpamPage", "text")).corpus_save(false) @pages << "TestPage" - @db.should_receive(:load).and_return do |page| + expect(@db).to receive(:load) do |page| "Text" if page=="TestPage" end - @db.should_receive(:get_attribute).any_number_of_times do |pg, attr| - pg.should == "TestPage" + allow(@db).to receive(:get_attribute) do |pg, attr| + expect(pg).to eq("TestPage") case attr when :title "Title" @@ -295,7 +295,7 @@ class << Object.new end end - Hiki::Filter::BayesFilter.should_receive(:new_db){Hiki::Filter::BayesFilter.proxied_by_rspec__new_db} + expect(Hiki::Filter::BayesFilter).to receive(:new_db){Hiki::Filter::BayesFilter.proxied_by_rspec__new_db} lambda{$SAFE=1;@c.rebuild_db}.call #should_not raise_error end end Modified: spec/bayes_filter_spec.rb (+97 -97) =================================================================== --- spec/bayes_filter_spec.rb 2014-03-06 19:32:32 +0900 (b776f6a) +++ spec/bayes_filter_spec.rb 2014-03-06 19:32:56 +0900 (1e2a6d1) @@ -17,14 +17,14 @@ module SetupBayesFilter @index_url = "http://www.example.org/hiki/" @opt = { } - @conf = stub("Hiki::Config", + @conf = double("Hiki::Config", data_path:@tmpdir, cache_path:"#{@tmpdir}/cache", bayes_threshold:nil, site_name:"SiteName", index_url:@index_url, null_object:false) - @conf.should_receive("[]".intern).any_number_of_times{|k| @opt[k]} + allow(@conf).to receive("[]".intern){|k| @opt[k]} @bf = Hiki::Filter::BayesFilter.init(@conf) end @@ -46,7 +46,7 @@ class << Object.new include SetupBayesFilter it "@@hiki_conf.index_url should return correct URL" do - Dummy.new.conf.index_url.should == @index_url + expect(Dummy.new.conf.index_url).to eq(@index_url) end end end @@ -55,37 +55,37 @@ describe Hiki::Filter::BayesFilter, "with default settings" do include 
SetupBayesFilter it "is module" do - @bf.should be_kind_of(Module) + expect(@bf).to be_kind_of(Module) end it "threshold" do - @bf.threshold.should == 0.9 + expect(@bf.threshold).to eq(0.9) end it "threshold_ham" do - @bf.threshold_ham.should == 0.1 + expect(@bf.threshold_ham).to eq(0.1) end it "db" do - @bf.db.should be_kind_of(Bayes::PlainBayes) - @bf.db.db_name.should == "#{@tmpdir}/bayes.db" - File.should_not be_exist(@bf.db.db_name) - lambda{@bf.db.save}.should_not raise_error - File.should be_exist(@bf.db.db_name) + expect(@bf.db).to be_kind_of(Bayes::PlainBayes) + expect(@bf.db.db_name).to eq("#{@tmpdir}/bayes.db") + expect(File).not_to be_exist(@bf.db.db_name) + expect{@bf.db.save}.not_to raise_error + expect(File).to be_exist(@bf.db.db_name) end it "new db" do @bf.db.ham << "ham" - @bf.db.ham.should be_include("ham") + expect(@bf.db.ham).to be_include("ham") @bf.new_db - @bf.db.ham.should_not be_include("ham") + expect(@bf.db.ham).not_to be_include("ham") end it "cache_path" do path = "#{@tmpdir}/cache/bayes" - File.should_not be_exist(path) - @bf.cache_path.should == path - File.should be_exist(path) + expect(File).not_to be_exist(path) + expect(@bf.cache_path).to eq(path) + expect(File).to be_exist(path) end it ".filter should not call Hiki::Filter.plugin.sendmail" do @@ -94,10 +94,10 @@ describe Hiki::Filter::BayesFilter, "with default settings" do "text", "title") old_page = Hiki::Filter::PageData.new("Page") - plugin = stub("plugin") - Hiki::Filter.should_not_receive(:plugin) - lambda{@bf.filter(new_page, old_page, true)}.should_not raise_error - lambda{@bf.filter(new_page, old_page, false)}.should_not raise_error + plugin = double("plugin") + expect(Hiki::Filter).not_to receive(:plugin) + expect{@bf.filter(new_page, old_page, true)}.not_to raise_error + expect{@bf.filter(new_page, old_page, false)}.not_to raise_error end end @@ -108,40 +108,40 @@ describe Hiki::Filter::BayesFilter, "with settings" do @opt["bayes_filter.type"] = "Paul Graham" 
@opt["bayes_filter.report"] = "1" @opt["bayes_filter.threshold"] = "0.9" - @conf.should_receive("[]".intern).any_number_of_times{|k| @opt[k]} + allow(@conf).to receive("[]".intern){|k| @opt[k]} @bf = Hiki::Filter::BayesFilter.init(@conf) end it "is module" do - @bf.should be_kind_of(Module) + expect(@bf).to be_kind_of(Module) end it "threshold" do - @bf.threshold.should == 0.90 + expect(@bf.threshold).to eq(0.90) end it "db" do - @bf.db.should be_kind_of(Bayes::PaulGraham) + expect(@bf.db).to be_kind_of(Bayes::PaulGraham) end it "page_is_ham?" do - db = mock("database") - Hiki::Filter::BayesFilter.stub!(:db).and_return(db) + db = double("database") + allow(Hiki::Filter::BayesFilter).to receive(:db).and_return(db) pd = Hiki::Filter::PageData bfpd = Hiki::Filter::BayesFilter::PageData - db.stub!(:estimate).and_return(0.0) - bfpd.new(pd.new("Page", "text")).ham?.should be_true + allow(db).to receive(:estimate).and_return(0.0) + expect(bfpd.new(pd.new("Page", "text")).ham?).to be_true - db.stub!(:estimate).and_return(1.0) - bfpd.new(pd.new("Page", "spam")).ham?.should be_false + allow(db).to receive(:estimate).and_return(1.0) + expect(bfpd.new(pd.new("Page", "spam")).ham?).to be_false - db.stub!(:estimate).and_return(0.5) - bfpd.new(pd.new("Page", "ham spam")).ham?.should == nil + allow(db).to receive(:estimate).and_return(0.5) + expect(bfpd.new(pd.new("Page", "ham spam")).ham?).to eq(nil) - db.stub!(:estimate).and_return(nil) - bfpd.new(pd.new("Page", "ham spam")).ham?.should == nil + allow(db).to receive(:estimate).and_return(nil) + expect(bfpd.new(pd.new("Page", "ham spam")).ham?).to eq(nil) end it ".filter should call Hiki::Filter.plugin.sendmail" do @@ -150,10 +150,10 @@ describe Hiki::Filter::BayesFilter, "with settings" do "text", "title") old_page = Hiki::Filter::PageData.new("Page") - plugin = stub("plugin") - plugin.should_receive(:sendmail) - Hiki::Filter.should_receive(:plugin).and_return(plugin) - lambda{@bf.filter(new_page, old_page, false)}.should_not 
raise_error + plugin = double("plugin") + expect(plugin).to receive(:sendmail) + expect(Hiki::Filter).to receive(:plugin).and_return(plugin) + expect{@bf.filter(new_page, old_page, false)}.not_to raise_error end it ".filter should not call Hiki::Filter.plugin.sendmail when posted by registered user" do @@ -162,8 +162,8 @@ describe Hiki::Filter::BayesFilter, "with settings" do "text", "title") old_page = Hiki::Filter::PageData.new("Page") - Hiki::Filter.should_not_receive(:plugin) - lambda{@bf.filter(new_page, old_page, true)}.should_not raise_error + expect(Hiki::Filter).not_to receive(:plugin) + expect{@bf.filter(new_page, old_page, true)}.not_to raise_error end end @@ -172,20 +172,20 @@ describe Hiki::Filter::BayesFilter::PageData do it "url" do pd = Hiki::Filter::BayesFilter::PageData.new(Hiki::Filter::PageData.new("Wiki Name", "text")) - pd.url.should == "#{@index_url}?Wiki+Name" + expect(pd.url).to eq("#{@index_url}?Wiki+Name") end it "ham?" do pd = Hiki::Filter::PageData bfpd = Hiki::Filter::BayesFilter::PageData - bfpd.new(pd.new("Page", "text")).ham?.should be_nil + expect(bfpd.new(pd.new("Page", "text")).ham?).to be_nil @bf.db.ham << ["ham"] - bfpd.new(pd.new("Page", "ham")).ham?.should be_true + expect(bfpd.new(pd.new("Page", "ham")).ham?).to be_true @bf.db.spam << ["spam"] - bfpd.new(pd.new("Page", "spam")).ham?.should be_false - bfpd.new(pd.new("Page", "ham spam")).ham?.should be_nil + expect(bfpd.new(pd.new("Page", "spam")).ham?).to be_false + expect(bfpd.new(pd.new("Page", "ham spam")).ham?).to be_nil end it "token" do @@ -195,29 +195,29 @@ describe Hiki::Filter::BayesFilter::PageData do tl = Hiki::Filter::BayesFilter::TokenList.new tl << "Page" << "text" << "Title" << "keyword" tl.add_host("127.0.0.1", "A") - bfpd.new(o).token.sort.should == tl.sort + expect(bfpd.new(o).token.sort).to eq(tl.sort) tl.clear.add_host("127.0.0.1", "A") - bfpd.new(o.dup, o).token.sort.should == tl.sort + expect(bfpd.new(o.dup, o).token.sort).to eq(tl.sort) tl.clear << 
"newtext" << "New" << "newword" tl.add_host("127.0.0.2", "A") - bfpd.new(pd.new("Page", "text\nnewtext", "New", "newword\nkeyword", "127.0.0.2"), o).token.sort.should == tl.sort + expect(bfpd.new(pd.new("Page", "text\nnewtext", "New", "newword\nkeyword", "127.0.0.2"), o).token.sort).to eq(tl.sort) end it "diff_text" do pd = Hiki::Filter::PageData - Hiki::Filter::BayesFilter::PageData.new(pd.new("", "old1\nnew1\nold2\nnew2"), pd.new("", "old1\nold2")).diff_text.should == "new1\nnew2" + expect(Hiki::Filter::BayesFilter::PageData.new(pd.new("", "old1\nnew1\nold2\nnew2"), pd.new("", "old1\nold2")).diff_text).to eq("new1\nnew2") end it "diff_keyword" do pd = Hiki::Filter::PageData - Hiki::Filter::BayesFilter::PageData.new(pd.new(nil, nil, nil, "old1\nnew1\nold2\nnew2"), pd.new(nil, nil, nil, "old1\nold2")).diff_keyword.sort.should == ["new1", "new2"].sort + expect(Hiki::Filter::BayesFilter::PageData.new(pd.new(nil, nil, nil, "old1\nnew1\nold2\nnew2"), pd.new(nil, nil, nil, "old1\nold2")).diff_keyword.sort).to eq(["new1", "new2"].sort) end it "get_unified_diff" do pd = Hiki::Filter::PageData - Hiki::Filter::BayesFilter::PageData.new(pd.new("", "old1\nnew1\nold2\nnew2\n"), pd.new("", "old1\nold2\n")).get_unified_diff.should == "@@ -1,2 +1,4 @@\n old1\n+new1\n old2\n+new2\n" + expect(Hiki::Filter::BayesFilter::PageData.new(pd.new("", "old1\nnew1\nold2\nnew2\n"), pd.new("", "old1\nold2\n")).get_unified_diff).to eq("@@ -1,2 +1,4 @@\n old1\n+new1\n old2\n+new2\n") end end @@ -235,85 +235,85 @@ describe Hiki::Filter::BayesFilter::PageData, "save and load" do end it "time_str" do - @time_str.should == "20010203040506000007" + expect(@time_str).to eq("20010203040506000007") end it "file_name" do - @pd.file_name.should == @time_str + expect(@pd.file_name).to eq(@time_str) end it "PageData#cache_path" do path = "#{@tmpdir}/cache/bayes" - File.should_not be_exist(path) - @pd.cache_path.should == path - File.should be_exist(path) + expect(File).not_to be_exist(path) + 
expect(@pd.cache_path).to eq(path) + expect(File).to be_exist(path) end it "PageData.cache_path" do path = "#{@tmpdir}/cache/bayes" - File.should_not be_exist(path) - Hiki::Filter::BayesFilter::PageData.cache_path.should == path - File.should be_exist(path) + expect(File).not_to be_exist(path) + expect(Hiki::Filter::BayesFilter::PageData.cache_path).to eq(path) + expect(File).to be_exist(path) end it "PageData#corpus_path" do path = "#{@tmpdir}/cache/bayes/corpus" - File.should_not be_exist(path) - @pd.corpus_path.should == path - File.should be_exist(path) + expect(File).not_to be_exist(path) + expect(@pd.corpus_path).to eq(path) + expect(File).to be_exist(path) end it "PageData.corpus_path" do path = "#{@tmpdir}/cache/bayes/corpus" - File.should_not be_exist(path) - Hiki::Filter::BayesFilter::PageData.corpus_path.should == path - File.should be_exist(path) + expect(File).not_to be_exist(path) + expect(Hiki::Filter::BayesFilter::PageData.corpus_path).to eq(path) + expect(File).to be_exist(path) end it "cache_file_name if DOUBT" do - @pd.cache_file_name.should == "#{@tmpdir}/cache/bayes/D#{@time_str}" + expect(@pd.cache_file_name).to eq("#{@tmpdir}/cache/bayes/D#{@time_str}") end it "cache_file_name if HAM" do @bf.db.ham << ["text"] - @pd.cache_file_name.should == "#{@tmpdir}/cache/bayes/H#{@time_str}" + expect(@pd.cache_file_name).to eq("#{@tmpdir}/cache/bayes/H#{@time_str}") end it "cache_file_name if SPAM" do @bf.db.spam << ["WikiName", "New", "Title"] - @pd.cache_file_name.should == "#{@tmpdir}/cache/bayes/S#{@time_str}" + expect(@pd.cache_file_name).to eq("#{@tmpdir}/cache/bayes/S#{@time_str}") end it "save and load" do - File.should_not be_exist(@pd.cache_file_name) + expect(File).not_to be_exist(@pd.cache_file_name) @pd.cache_save - File.should be_exist(@pd.cache_file_name) + expect(File).to be_exist(@pd.cache_file_name) pd2 = Hiki::Filter::BayesFilter::PageData.load(@pd.cache_file_name) - pd2.should be_kind_of(Hiki::Filter::BayesFilter::PageData) + 
expect(pd2).to be_kind_of(Hiki::Filter::BayesFilter::PageData) [:page, :text, :title, :keyword, :remote_addr].each do |m| - pd2.old_page.send(m).should == @pd.old_page.send(m) - pd2.new_page.send(m).should == @pd.new_page.send(m) + expect(pd2.old_page.send(m)).to eq(@pd.old_page.send(m)) + expect(pd2.new_page.send(m)).to eq(@pd.new_page.send(m)) end - pd2.time.should == @pd.time + expect(pd2.time).to eq(@pd.time) end it "load and delete" do @pd.cache_save - File.should be_exist(@pd.cache_file_name) + expect(File).to be_exist(@pd.cache_file_name) Hiki::Filter::BayesFilter::PageData.load(@pd.cache_file_name) - File.should be_exist(@pd.cache_file_name) + expect(File).to be_exist(@pd.cache_file_name) Hiki::Filter::BayesFilter::PageData.load(@pd.cache_file_name, true) - File.should_not be_exist(@pd.cache_file_name) + expect(File).not_to be_exist(@pd.cache_file_name) path = "#{@tmpdir}/dummy" open(path, "w") do |f| Marshal.dump([], f) end - File.should be_exist(path) + expect(File).to be_exist(path) Hiki::Filter::BayesFilter::PageData.load(path) - File.should be_exist(path) + expect(File).to be_exist(path) Hiki::Filter::BayesFilter::PageData.load(path, true) - File.should be_exist(path) + expect(File).to be_exist(path) end it "load invalid data and return nil" do @@ -321,50 +321,50 @@ describe Hiki::Filter::BayesFilter::PageData, "save and load" do open(file, "w") do |f| Marshal.dump([], f) end - Hiki::Filter::BayesFilter::PageData.load(file).should be_nil + expect(Hiki::Filter::BayesFilter::PageData.load(file)).to be_nil end it "load cache" do @pd.cache_save pd = Hiki::Filter::BayesFilter::PageData.load_from_cache(@pd.cache_file_name[/.\d+$/]) - pd.cache_file_name.should == @pd.cache_file_name + expect(pd.cache_file_name).to eq(@pd.cache_file_name) Hiki::Filter::BayesFilter::PageData.load_from_cache(@pd.cache_file_name[/.\d+$/], true) - File.should_not be_exist(@pd.cache_file_name) + expect(File).not_to be_exist(@pd.cache_file_name) end it "save different 
data at same time" do fn = @pd.cache_file_name @pd.cache_save - @pd.cache_file_name.should == fn - @pd.time.should == @time + expect(@pd.cache_file_name).to eq(fn) + expect(@pd.time).to eq(@time) @pd.cache_save - @pd.time.should_not == @time - @pd.time.should == @time2 - @pd.file_name.should == @time2_str - @pd.cache_file_name.should == "#{@tmpdir}/cache/bayes/D#{@time2_str}" + expect(@pd.time).not_to eq(@time) + expect(@pd.time).to eq(@time2) + expect(@pd.file_name).to eq(@time2_str) + expect(@pd.cache_file_name).to eq("#{@tmpdir}/cache/bayes/D#{@time2_str}") end it "saving at same time over 10 times raise error" do time = @time.dup 10.times do - lambda{@pd.cache_save}.should_not raise_error + expect{@pd.cache_save}.not_to raise_error end - @time.should == time + expect(@time).to eq(time) pd2 = Hiki::Filter::BayesFilter::PageData.new(Hiki::Filter::PageData.new("Page", "text", "Title"), Hiki::Filter::PageData.new, @time) - lambda{pd2.cache_save}.should raise_error(Errno::EEXIST) - pd2.time.should == time + expect{pd2.cache_save}.to raise_error(Errno::EEXIST) + expect(pd2.time).to eq(time) end it "corpus_save" do ham = "#{@tmpdir}/cache/bayes/corpus/H#{@time_str}" spam = "#{@tmpdir}/cache/bayes/corpus/S#{@time_str}" - File.should_not be_exist(ham) - File.should_not be_exist(spam) + expect(File).not_to be_exist(ham) + expect(File).not_to be_exist(spam) @pd.corpus_save(true) - File.should be_exist(ham) + expect(File).to be_exist(ham) @pd.corpus_save(false) - File.should be_exist(spam) + expect(File).to be_exist(spam) end end Modified: spec/filter_spec.rb (+7 -7) =================================================================== --- spec/filter_spec.rb 2014-03-06 19:32:32 +0900 (5c4b252) +++ spec/filter_spec.rb 2014-03-06 19:32:56 +0900 (72c10db) @@ -9,7 +9,7 @@ require "hiki/command" describe Hiki::Filter, "when error raised in filtering" do before do - Hiki::Filter.should_not be_respond_to(:clear_filters) + expect(Hiki::Filter).not_to 
be_respond_to(:clear_filters) module Hiki::Filter def self.clear_filters r =****@filte***** @@ -23,10 +23,10 @@ describe Hiki::Filter, "when error raised in filtering" do raise "ERROR" end - @conf = stub("conf", null_object:true) - @cgi = stub("cgi", null_object:true) - @plugin = stub("plugin", null_object:true) - @db = stub("db", null_object:true) + @conf = double("conf", null_object:true) + @cgi = double("cgi", null_object:true) + @plugin = double("plugin", null_object:true) + @db = double("db", null_object:true) Hiki::Filter.init(@conf, @cgi, @plugin, @db) end @@ -43,7 +43,7 @@ describe Hiki::Filter, "when error raised in filtering" do it "should through page data without filter raised error" do r = nil - lambda{r = Hiki::Filter.new_page_is_spam?("TestPage", "text", "title")}.should_not raise_error - r.should be_false + expect{r = Hiki::Filter.new_page_is_spam?("TestPage", "text", "title")}.not_to raise_error + expect(r).to be_false end end