Commit d3fcb0ca authored by Wandenberg

update gems and corresponding test syntax

parent be0ae92b
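Most of the spec changes below are mechanical: the suite moves from RSpec 2's monkey-patched should syntax to RSpec 3's expect syntax, and be_true becomes be_truthy. A minimal before/after sketch of that conversion, using expectations that appear in these specs (be_http_status and match_the_pattern are custom matchers used throughout the suite):

# RSpec 2 style (removed)
response.code.should eql("200")
resp.has_key?("channels").should be_true
pub.should be_http_status(200).with_body

# RSpec 3 style (added)
expect(response.code).to eql("200")
expect(resp.has_key?("channels")).to be_truthy
expect(pub).to be_http_status(200).with_body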
source "https://rubygems.org"
gem 'rake', '~> 10.0.3'
ruby '2.1.2'
gem 'rake'
group :test do
gem 'rspec', '~> 2.14.1'
gem 'em-http-request', '~> 1.0.3'
gem 'nginx_test_helper', '~> 0.3.0'
gem 'jshintrb', '~> 0.2.1'
gem 'rspec'
gem 'nginx_test_helper', '~> 0.4.0'
gem 'jshintrb'
gem 'therubyracer'
gem 'jasmine', '~> 1.3.1'
gem 'listen', '~> 0.7.2'
gem 'rb-inotify', '~> 0.9.4', :require => RUBY_PLATFORM.include?('linux') && 'rb-inotify'
gem 'rb-fsevent', '~> 0.9', :require => RUBY_PLATFORM.include?('darwin') && 'rb-fsevent'
gem 'json', '~> 1.8.1'
gem 'thin', '~> 1.5.1'
gem 'net-http-persistent', '~> 2.9', :require => 'net/http/persistent'
gem 'jasmine'
gem 'listen'
gem 'rb-inotify', require: RUBY_PLATFORM.include?('linux') && 'rb-inotify'
gem 'rb-fsevent', require: RUBY_PLATFORM.include?('darwin') && 'rb-fsevent'
gem 'json'
gem 'thin'
gem 'net-http-persistent', require: 'net/http/persistent'
gem 'websocket-eventmachine-client'
gem 'em-eventsource'
gem 'byebug', '~> 1.3.1'
gem 'byebug'
end
group :docs do
gem 'github-markup', '~> 0.7.5', :require => 'github/markup'
gem 'RedCloth', '~> 4.2.9'
gem 'nokogiri', '~> 1.5.6'
gem 'github-markup', require: 'github/markup'
gem 'RedCloth'
gem 'nokogiri'
gem 'filewatcher'
end
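One detail the Gemfile keeps through the update is the platform-conditional require: value for the file-watcher backends. The boolean expression evaluates to the gem's require name on the matching platform and to false everywhere else, so Bundler.require only loads the watcher that can actually run on the current OS. A small sketch of how the expression resolves (the platform strings are illustrative):

RUBY_PLATFORM                                     # e.g. "x86_64-linux" or "x86_64-darwin14"
RUBY_PLATFORM.include?('linux') && 'rb-inotify'   # => "rb-inotify" on Linux, false elsewhere
RUBY_PLATFORM.include?('darwin') && 'rb-fsevent'  # => "rb-fsevent" on macOS, false elsewhere
# Bundler treats require: false as "install the gem but do not auto-require it"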
......@@ -3,86 +3,92 @@ GEM
specs:
Platform (0.4.0)
RedCloth (4.2.9)
addressable (2.3.7)
byebug (1.3.1)
columnize (~> 0.3.6)
debugger-linecache (~> 1.2.0)
childprocess (0.5.5)
ffi (~> 1.0, >= 1.0.11)
columnize (0.3.6)
addressable (2.3.8)
byebug (4.0.5)
columnize (= 0.9.0)
celluloid (0.16.0)
timers (~> 4.0.0)
columnize (0.9.0)
cookiejar (0.3.2)
daemons (1.1.9)
debugger-linecache (1.2.0)
daemons (1.2.2)
diff-lcs (1.2.5)
em-eventsource (0.2.0)
em-http-request (>= 1.0.0)
eventmachine (>= 1.0.0.beta3)
em-http-request (1.0.3)
addressable (>= 2.2.3)
em-http-request (1.1.2)
addressable (>= 2.3.4)
cookiejar
em-socksify
eventmachine (>= 1.0.0.beta.4)
http_parser.rb (>= 0.5.3)
em-socksify (>= 0.3)
eventmachine (>= 1.0.3)
http_parser.rb (>= 0.6.0)
em-socksify (0.3.0)
eventmachine (>= 1.0.0.beta.4)
eventmachine (1.0.7)
execjs (2.0.2)
ffi (1.9.6)
filewatcher (0.3.4)
execjs (2.5.0)
ffi (1.9.8)
filewatcher (0.4.0)
trollop (~> 2.0)
github-markup (0.7.5)
github-markup (1.3.3)
hitimes (1.2.2)
http_parser.rb (0.6.0)
jasmine (1.3.2)
jasmine-core (~> 1.3.1)
rack (~> 1.0)
rspec (>= 1.3.1)
selenium-webdriver (>= 0.1.3)
jasmine-core (1.3.1)
jshintrb (0.2.4)
jasmine (2.2.0)
jasmine-core (~> 2.2)
phantomjs
rack (>= 1.2.1)
rake
jasmine-core (2.2.0)
jshintrb (0.3.0)
execjs
multi_json (>= 1.3)
rake
json (1.8.1)
libv8 (3.16.14.3)
listen (0.7.3)
multi_json (1.9.3)
json (1.8.2)
libv8 (3.16.14.7)
listen (2.10.0)
celluloid (~> 0.16.0)
rb-fsevent (>= 0.9.3)
rb-inotify (>= 0.9)
mini_portile (0.6.2)
multi_json (1.11.0)
net-http-persistent (2.9.4)
nginx_test_helper (0.3.0)
nginx_test_helper (0.4.1)
popen4
nokogiri (1.5.11)
open4 (1.3.3)
nokogiri (1.6.6.2)
mini_portile (~> 0.6.0)
open4 (1.3.4)
phantomjs (1.9.8.0)
popen4 (0.1.2)
Platform (>= 0.4.0)
open4 (>= 0.4.0)
rack (1.5.2)
rake (10.0.4)
rack (1.6.0)
rake (10.4.2)
rb-fsevent (0.9.4)
rb-inotify (0.9.4)
rb-inotify (0.9.5)
ffi (>= 0.5.0)
ref (1.0.5)
rspec (2.14.1)
rspec-core (~> 2.14.0)
rspec-expectations (~> 2.14.0)
rspec-mocks (~> 2.14.0)
rspec-core (2.14.8)
rspec-expectations (2.14.5)
diff-lcs (>= 1.1.3, < 2.0)
rspec-mocks (2.14.6)
rubyzip (1.1.3)
selenium-webdriver (2.41.0)
childprocess (>= 0.5.0)
multi_json (~> 1.0)
rubyzip (~> 1.0)
websocket (~> 1.0.4)
therubyracer (0.12.1)
rspec (3.2.0)
rspec-core (~> 3.2.0)
rspec-expectations (~> 3.2.0)
rspec-mocks (~> 3.2.0)
rspec-core (3.2.3)
rspec-support (~> 3.2.0)
rspec-expectations (3.2.1)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.2.0)
rspec-mocks (3.2.1)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.2.0)
rspec-support (3.2.2)
therubyracer (0.12.2)
libv8 (~> 3.16.14.0)
ref
thin (1.5.1)
daemons (>= 1.0.9)
eventmachine (>= 0.12.6)
rack (>= 1.0.0)
trollop (2.0)
websocket (1.0.7)
thin (1.6.3)
daemons (~> 1.0, >= 1.0.9)
eventmachine (~> 1.0)
rack (~> 1.0)
timers (4.0.1)
hitimes
trollop (2.1.2)
websocket (1.2.1)
websocket-eventmachine-base (1.1.0)
eventmachine (~> 1.0)
websocket (~> 1.0)
......@@ -95,23 +101,22 @@ PLATFORMS
ruby
DEPENDENCIES
RedCloth (~> 4.2.9)
byebug (~> 1.3.1)
RedCloth
byebug
em-eventsource
em-http-request (~> 1.0.3)
filewatcher
github-markup (~> 0.7.5)
jasmine (~> 1.3.1)
jshintrb (~> 0.2.1)
json (~> 1.8.1)
listen (~> 0.7.2)
net-http-persistent (~> 2.9)
nginx_test_helper (~> 0.3.0)
nokogiri (~> 1.5.6)
rake (~> 10.0.3)
rb-fsevent (~> 0.9)
rb-inotify (~> 0.9.4)
rspec (~> 2.14.1)
github-markup
jasmine
jshintrb
json
listen
net-http-persistent
nginx_test_helper (~> 0.4.0)
nokogiri
rake
rb-fsevent
rb-inotify
rspec
therubyracer
thin (~> 1.5.1)
thin
websocket-eventmachine-client
......@@ -40,7 +40,7 @@ begin
end
task :monitor_js do
copy_inner_js = Proc.new do |modified, added, removed|
def copy_inner_js(modified, added, removed)
modified.each do |file|
destiny_path = File.dirname(file).gsub(/.*\/js\/?/, File.expand_path('pushstream/js', Dir.tmpdir))
FileUtils.mkdir_p(destiny_path)
......@@ -50,10 +50,11 @@ begin
end
end
copy_inner_js.call([[File.expand_path('misc/js/pushstream.js', project_dir)], [], []])
listener = Listen.to(File.expand_path('misc/js', project_dir), :filter => /\.js$/)
listener.change(&copy_inner_js)
listener.start(false)
copy_inner_js([File.expand_path('misc/js/pushstream.js', project_dir)], [], [])
listener = Listen.to(File.expand_path('misc/js', project_dir), :filter => /\.js$/) do |modified, added, removed|
copy_inner_js(modified, added, removed)
end
listener.start
end
task :test_server do
......
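The :monitor_js task above is also adapted to the newer Listen API: instead of registering the callback with listener.change(&copy_inner_js) and calling start(false), the callback block is now passed directly to Listen.to, and start returns immediately, running the watcher in a background thread. A reduced sketch of the new shape (the directory path is simplified; the real task builds it with File.expand_path against project_dir):

require 'listen'

# Listen >= 2.x: the change callback is a block given to Listen.to,
# and #start is non-blocking, so the rake task keeps running afterwards.
listener = Listen.to('misc/js') do |modified, added, removed|
  copy_inner_js(modified, added, removed)
end
listener.start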
beforeEach(function() {
this.addMatchers({
});
});
(function() {
var D = new Date('2011-06-02T09:34:29+02:00');
if (!D || +D !== 1307000069000) {
......@@ -37,3 +31,29 @@ beforeEach(function() {
};
}
})();
// This is the equivalent of the old waitsFor/runs syntax
// which was removed from Jasmine 2
var waitsForAndRuns = function(escapeFunction, runFunction, escapeTime) {
// check the escapeFunction every millisecond so that as soon as it is satisfied we can escape the function
var interval = setInterval(function() {
if (escapeFunction()) {
clearMe();
runFunction();
}
}, 1);
// in case the escapeFunction is never satisfied, we will time out
// at the escapeTime
var timeOut = setTimeout(function() {
clearMe();
runFunction();
}, escapeTime);
// clear the interval and the timeout
function clearMe(){
clearInterval(interval);
clearTimeout(timeOut);
}
};
# src_files
#
# Return an array of filepaths relative to src_dir to include before jasmine specs.
# Default: []
#
# EXAMPLE:
#
# src_files:
# - lib/source1.js
# - lib/source2.js
# - dist/**/*.js
#
src_files:
- public/javascripts/**/*.js
# stylesheets
#
# Return an array of stylesheet filepaths relative to src_dir to include before jasmine specs.
# Default: []
#
# EXAMPLE:
#
# stylesheets:
# - css/style.css
# - stylesheets/*.css
#
stylesheets:
- stylesheets/**/*.css
# helpers
#
# Return an array of filepaths relative to spec_dir to include before jasmine specs.
# Default: ["helpers/**/*.js"]
#
# EXAMPLE:
#
# helpers:
# - helpers/**/*.js
#
helpers:
- 'helpers/**/*.js'
# spec_files
#
# Return an array of filepaths relative to spec_dir to include.
# Default: ["**/*[sS]pec.js"]
#
# EXAMPLE:
#
# spec_files:
# - **/*[sS]pec.js
#
spec_files:
- '**/*[sS]pec.js'
# src_dir
#
# Source directory path. Your src_files must be returned relative to this path. Will use root if left blank.
# Default: project root
#
# EXAMPLE:
#
# src_dir: public
#
src_dir:
# spec_dir
#
# Spec directory path. Your spec_files must be returned relative to this path.
# Default: spec/javascripts
#
# EXAMPLE:
#
# spec_dir: spec/javascripts
#
spec_dir:
# spec_helper
#
# Ruby file that Jasmine server will require before starting.
# Returned relative to your root path
# Default spec/javascripts/support/jasmine_helper.rb
#
# EXAMPLE:
#
# spec_helper: spec/javascripts/support/jasmine_helper.rb
#
spec_helper: spec/javascripts/support/jasmine_helper.rb
# boot_dir
#
# Boot directory path. Your boot_files must be returned relative to this path.
# Default: Built in boot file
#
# EXAMPLE:
#
# boot_dir: spec/javascripts/support/boot
#
boot_dir:
# boot_files
#
# Return an array of filepaths relative to boot_dir to include in order to boot Jasmine
# Default: Built in boot file
#
# EXAMPLE
#
# boot_files:
# - '**/*.js'
#
boot_files:
# rack_options
#
# Extra options to be passed to the rack server
# by default, Port and AccessLog are passed.
#
# This is an advanced option, and is left empty by default
#
# EXAMPLE
#
# rack_options:
# server: 'thin'
Jasmine.configure do |config|
project_dir = File.expand_path('../../../../misc/..', File.dirname(__FILE__))
config.spec_dir = project_dir
config.spec_files = lambda { Dir["#{project_dir}/misc/spec/javascripts/helpers/**/*.js"] + Dir["#{project_dir}/misc/js/jquery.min.js"] + Dir["#{project_dir}/misc/**/*[sS]pec.js"] }
js_tmp_dir = File.expand_path('pushstream/js', Dir.tmpdir)
config.src_dir = js_tmp_dir
config.src_files = lambda { Dir["#{js_tmp_dir}/**/*.js"] }
end
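For context, config.src_dir above points at the same temporary directory the Rakefile's copy_inner_js writes into, so Jasmine serves the freshly copied pushstream.js rather than the copy in the source tree. Both files derive the path the same way (the resulting value shown is an assumption for a typical Linux host):

require 'tmpdir'
js_tmp_dir = File.expand_path('pushstream/js', Dir.tmpdir)
# => e.g. "/tmp/pushstream/js"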
......@@ -27,16 +27,16 @@ describe "Keepalive" do
post_single = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}"
post_single.body = body
response_single = http_single.request(uri, post_single)
response_single.code.should eql("200")
response_single.body.should eql(%({"channel": "#{channel}#{i + j}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n))
expect(response_single.code).to eql("200")
expect(response_single.body).to eql(%({"channel": "#{channel}#{i + j}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n))
post_double = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}/#{channel}#{i}_#{j}"
post_double.body = body
response_double = http_double.request(uri, post_double)
response_double.code.should eql("200")
response_double.body.should match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "#{(i + j) * 2}", "wildcard_channels": "0", "uptime": "[0-9]*", "infos": \[\r\n/)
response_double.body.should match_the_pattern(/"channel": "#{channel}#{i + j}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"},\r\n/)
response_double.body.should match_the_pattern(/"channel": "#{channel}#{i}_#{j}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
expect(response_double.code).to eql("200")
expect(response_double.body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "#{(i + j) * 2}", "wildcard_channels": "0", "uptime": "[0-9]*", "infos": \[\r\n/)
expect(response_double.body).to match_the_pattern(/"channel": "#{channel}#{i + j}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"},\r\n/)
expect(response_double.body).to match_the_pattern(/"channel": "#{channel}#{i}_#{j}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
end
end
end
......@@ -55,8 +55,8 @@ describe "Keepalive" do
post = Net::HTTP::Post.new "/pub?id=#{channel}#{i + j}"
post.body = body
response = http.request(uri, post)
response.code.should eql("200")
response.body.should eql("")
expect(response.code).to eql("200")
expect(response.body).to eql("")
end
end
end
......@@ -70,40 +70,40 @@ describe "Keepalive" do
socket = open_socket(nginx_host, nginx_port)
headers, body = get_in_socket("/pub", socket)
body.should eql("")
headers.should include("No channel id provided.")
expect(body).to eql("")
expect(headers).to include("No channel id provided.")
headers, body = post_in_socket("/pub?id=#{channel}", content, socket, {:wait_for => "}\r\n"})
body.should eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")
expect(body).to eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")
headers, body = get_in_socket("/channels-stats", socket)
body.should match_the_pattern(/"channels": "1", "wildcard_channels": "0", "published_messages": "1", "stored_messages": "1", "messages_in_trash": "0", "channels_in_trash": "0", "subscribers": "0", "uptime": "[0-9]*", "by_worker": \[\r\n/)
body.should match_the_pattern(/\{"pid": "[0-9]*", "subscribers": "0", "uptime": "[0-9]*"\}/)
expect(body).to match_the_pattern(/"channels": "1", "wildcard_channels": "0", "published_messages": "1", "stored_messages": "1", "messages_in_trash": "0", "channels_in_trash": "0", "subscribers": "0", "uptime": "[0-9]*", "by_worker": \[\r\n/)
expect(body).to match_the_pattern(/\{"pid": "[0-9]*", "subscribers": "0", "uptime": "[0-9]*"\}/)
socket.print("DELETE /pub?id=#{channel}_1 HTTP/1.1\r\nHost: test\r\n\r\n")
headers, body = read_response_on_socket(socket)
headers.should include("HTTP/1.1 404 Not Found")
expect(headers).to include("HTTP/1.1 404 Not Found")
headers, body = get_in_socket("/channels-stats?id=ALL", socket)
body.should match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "1", "wildcard_channels": "0", "uptime": "[0-9]*", "infos": \[\r\n/)
body.should match_the_pattern(/"channel": "#{channel}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
expect(body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "1", "wildcard_channels": "0", "uptime": "[0-9]*", "infos": \[\r\n/)
expect(body).to match_the_pattern(/"channel": "#{channel}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
headers, body = get_in_socket("/pub?id=#{channel}", socket)
body.should eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")
expect(body).to eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")
headers, body = post_in_socket("/pub?id=#{channel}/broad_#{channel}", content, socket, {:wait_for => "}\r\n"})
body.should match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "1", "wildcard_channels": "1", "uptime": "[0-9]*", "infos": \[\r\n/)
body.should match_the_pattern(/"channel": "#{channel}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"},\r\n/)
body.should match_the_pattern(/"channel": "broad_#{channel}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
expect(body).to match_the_pattern(/"hostname": "[^"]*", "time": "\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", "channels": "1", "wildcard_channels": "1", "uptime": "[0-9]*", "infos": \[\r\n/)
expect(body).to match_the_pattern(/"channel": "#{channel}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"},\r\n/)
expect(body).to match_the_pattern(/"channel": "broad_#{channel}", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}\r\n/)
headers, body = get_in_socket("/channels-stats?id=#{channel}", socket)
body.should match_the_pattern(/{"channel": "#{channel}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"}\r\n/)
expect(body).to match_the_pattern(/{"channel": "#{channel}", "published_messages": "2", "stored_messages": "2", "subscribers": "0"}\r\n/)
socket.print("DELETE /pub?id=#{channel} HTTP/1.1\r\nHost: test\r\n\r\n")
headers, body = read_response_on_socket(socket)
headers.should include("X-Nginx-PushStream-Explain: Channel deleted.")
expect(headers).to include("X-Nginx-PushStream-Explain: Channel deleted.")
socket.close
end
......@@ -122,7 +122,7 @@ describe "Keepalive" do
socket.print(get_messages)
post_in_socket("/pub?id=#{channel}", "#{body_prefix} #{j.to_s.rjust(3, '0')}", socket_pub, {:wait_for => "}\r\n"})
headers, body = read_response_on_socket(socket, "\r\n0\r\n\r\n")
body.should eql("16\r\nmessage to be sent #{j.to_s.rjust(3, '0')}\r\n0\r\n\r\n")
expect(body).to eql("16\r\nmessage to be sent #{j.to_s.rjust(3, '0')}\r\n0\r\n\r\n")
end
socket.close
......
......@@ -39,11 +39,11 @@ describe "Measure Memory" do
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get
pub_2.callback do
pub_2.should be_http_status(200).with_body
expect(pub_2).to be_http_status(200).with_body
resp = JSON.parse(pub_2.response)
expected_message = shared_size / (message_estimate_size + body.size)
resp["published_messages"].to_i.should be_within(80).of(expected_message)
expect(resp["published_messages"].to_i).to be_within(80).of(expected_message)
EventMachine.stop
end
end
......@@ -71,11 +71,11 @@ describe "Measure Memory" do
EventMachine.run do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get
pub_2.callback do
pub_2.should be_http_status(200).with_body
expect(pub_2).to be_http_status(200).with_body
resp = JSON.parse(pub_2.response)
expected_channel = (shared_size - ((body.size + message_estimate_size) * resp["published_messages"].to_i)) / (channel_estimate_size + 4) # 4 channel id size
resp["channels"].to_i.should be_within(10).of(expected_channel)
expect(resp["channels"].to_i).to be_within(10).of(expected_channel)
EventMachine.stop
end
end
......@@ -90,11 +90,11 @@ describe "Measure Memory" do
subscriber_in_loop(1000, headers) do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
pub_2.should be_http_status(200).with_body
expect(pub_2).to be_http_status(200).with_body
resp = JSON.parse(pub_2.response)
expected_subscriber = (shared_size - ((channel_estimate_size + 4) * resp["channels"].to_i)) / subscriber_estimate_size # 4 channel id size
resp["subscribers"].to_i.should be_within(10).of(expected_subscriber)
expect(resp["subscribers"].to_i).to be_within(10).of(expected_subscriber)
EventMachine.stop
end
end
......@@ -127,17 +127,18 @@ describe "Measure Memory" do
end
end
per_subscriber.should be_within(100).of(subscriber_estimate_system_size)
expect(per_subscriber).to be_within(100).of(subscriber_estimate_system_size)
end
end
end
def subscriber_in_loop(channel, headers, &block)
called = false
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_i.to_s).get :head => headers
sub.stream do |chunk|
subscriber_in_loop(channel.to_i + 1, headers) do
yield block
end
next if called
called = true
subscriber_in_loop(channel.to_i + 1, headers, &block)
end
sub.callback do
block.call
......@@ -145,15 +146,15 @@ def subscriber_in_loop(channel, headers, &block)
end
def subscriber_in_loop_with_limit(channel, headers, start, limit, &block)
called = false
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_i.to_s).get :head => headers
sub.stream do |chunk|
if start == limit
block.call
EventMachine.stop
else
subscriber_in_loop_with_limit(channel, headers, start + 1, limit) do
yield block
end
next if called
called = true
subscriber_in_loop_with_limit(channel, headers, start + 1, limit, &block)
end
end
sub.callback do
......
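Both subscriber helpers above gain a called flag and now forward the caller's block directly with &block instead of wrapping it in a nested yield. The flag makes the body of the stream callback run only once per connection, however many chunks arrive before the next request is issued. The guard pattern reduced to its core (sub stands in for the EventMachine::HttpRequest created in the helpers):

called = false
sub.stream do |chunk|
  next if called   # ignore every chunk after the first
  called = true
  subscriber_in_loop(channel.to_i + 1, headers, &block)  # recurse exactly once per connection
end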
......@@ -31,9 +31,9 @@ describe "Send Signals" do
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge('X-Nginx-PushStream-Mode' => 'long-polling')
sub_1.callback do
sub_1.should be_http_status(304).without_body
Time.parse(sub_1.response_header['LAST_MODIFIED'].to_s).utc.to_i.should be_in_the_interval(Time.now.utc.to_i-1, Time.now.utc.to_i)
sub_1.response_header['ETAG'].to_s.should eql("0")
expect(sub_1).to be_http_status(304).without_body
expect(Time.parse(sub_1.response_header['LAST_MODIFIED'].to_s).utc.to_i).to be_in_the_interval(Time.now.utc.to_i-1, Time.now.utc.to_i)
expect(sub_1.response_header['ETAG'].to_s).to eql("0")
end
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
......@@ -43,7 +43,7 @@ describe "Send Signals" do
response += chunk
end
sub_2.callback do
response.should include("FOOTER")
expect(response).to include("FOOTER")
EventMachine.stop
end
end
......@@ -71,11 +71,11 @@ describe "Send Signals" do
# check statistics
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_1.callback do
pub_1.should be_http_status(200).with_body
expect(pub_1).to be_http_status(200).with_body
resp_1 = JSON.parse(pub_1.response)
resp_1.has_key?("channels").should be_true
resp_1["channels"].to_i.should eql(1)
resp_1["by_worker"].count.should eql(1)
expect(resp_1.has_key?("channels")).to be_truthy
expect(resp_1["channels"].to_i).to eql(1)
expect(resp_1["by_worker"].count).to eql(1)
pid = resp_1["by_worker"][0]['pid'].to_i
open_sockets_1 = `lsof -p #{Process.getpgid pid} | grep socket | wc -l`.strip
......@@ -96,7 +96,7 @@ describe "Send Signals" do
pub_4 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_4.callback do
resp_3 = JSON.parse(pub_4.response)
resp_3.has_key?("by_worker").should be_true
expect(resp_3.has_key?("by_worker")).to be_truthy
old_process_running = Process.getpgid(pid) rescue false
if !old_process_running && (resp_3["by_worker"].count == 1) && (pid != resp_3["by_worker"][0]['pid'].to_i)
......@@ -115,13 +115,13 @@ describe "Send Signals" do
pub_3.callback do
resp_2 = JSON.parse(pub_3.response)
resp_2.has_key?("channels").should be_true
resp_2["channels"].to_i.should eql(1)
resp_2["published_messages"].to_i.should eql(1)
resp_2["subscribers"].to_i.should eql(1)
expect(resp_2.has_key?("channels")).to be_truthy
expect(resp_2["channels"].to_i).to eql(1)
expect(resp_2["published_messages"].to_i).to eql(1)
expect(resp_2["subscribers"].to_i).to eql(1)
open_sockets_2 = `lsof -p #{Process.getpgid resp_3["by_worker"][0]['pid'].to_i} | grep socket | wc -l`.strip
open_sockets_2.should eql(open_sockets_1)
expect(open_sockets_2).to eql(open_sockets_1)
EventMachine.stop
......@@ -129,7 +129,7 @@ describe "Send Signals" do
`#{ nginx_executable } -c #{ conf.configuration_filename } -s stop > /dev/null 2>&1`
error_log_pos = File.readlines(conf.error_log)
(error_log_pos - error_log_pre).join.should_not include("open socket")
expect((error_log_pos - error_log_pre).join).not_to include("open socket")
socket.close unless socket.nil?
end
end
......@@ -158,11 +158,11 @@ describe "Send Signals" do
# check statistics
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_1.callback do
pub_1.should be_http_status(200).with_body
expect(pub_1).to be_http_status(200).with_body
resp_1 = JSON.parse(pub_1.response)
resp_1["subscribers"].to_i.should eql(1)
resp_1["channels"].to_i.should eql(1)
resp_1["by_worker"].count.should eql(1)
expect(resp_1["subscribers"].to_i).to eql(1)
expect(resp_1["channels"].to_i).to eql(1)
expect(resp_1["by_worker"].count).to eql(1)
pid = resp_1["by_worker"][0]['pid'].to_i
# send reload signal
......@@ -175,14 +175,14 @@ describe "Send Signals" do
pub_4 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_4.callback do
resp_3 = JSON.parse(pub_4.response)
resp_3.has_key?("by_worker").should be_true
expect(resp_3.has_key?("by_worker")).to be_truthy
if resp_3["by_worker"].count == 1
resp_3["subscribers"].to_i.should eql(0)
resp_3["channels"].to_i.should eql(1)
expect(resp_3["subscribers"].to_i).to eql(0)
expect(resp_3["channels"].to_i).to eql(1)
pid2 = resp_3["by_worker"][0]['pid'].to_i
pid.should_not eql(pid2)
expect(pid).not_to eql(pid2)
EventMachine.stop
end
end
......@@ -228,11 +228,11 @@ describe "Send Signals" do
# check statistics
pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_1.callback do
pub_1.should be_http_status(200).with_body
expect(pub_1).to be_http_status(200).with_body
resp_1 = JSON.parse(pub_1.response)
resp_1.has_key?("channels").should be_true
resp_1["channels"].to_i.should eql(1)
resp_1["published_messages"].to_i.should eql(1)
expect(resp_1.has_key?("channels")).to be_truthy
expect(resp_1["channels"].to_i).to eql(1)
expect(resp_1["published_messages"].to_i).to eql(1)
conf.configuration[:shared_memory_size] = '20m'
conf.create_configuration_file
......@@ -244,14 +244,14 @@ describe "Send Signals" do
pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
pub_2.callback do
pub_2.should be_http_status(200).with_body
expect(pub_2).to be_http_status(200).with_body
resp_2 = JSON.parse(pub_2.response)
resp_2.has_key?("channels").should be_true
resp_2["channels"].to_i.should eql(1)
resp_2["published_messages"].to_i.should eql(1)
expect(resp_2.has_key?("channels")).to be_truthy
expect(resp_2["channels"].to_i).to eql(1)
expect(resp_2["published_messages"].to_i).to eql(1)
error_log = File.read(conf.error_log)
error_log.should include("Cannot change memory area size without restart, ignoring change")
expect(error_log).to include("Cannot change memory area size without restart, ignoring change")
EventMachine.stop
end
......
......@@ -22,11 +22,11 @@ describe "Wildcard Properties" do
pub.callback do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad_fail).get :head => headers
sub_1.callback do |chunk|
sub_1.should be_http_status(403).without_body
expect(sub_1).to be_http_status(403).without_body
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad).get :head => headers
sub_2.stream do |chunk2|
chunk2.should eql(conf.header_template)
expect(chunk2).to eql(conf.header_template)
EventMachine.stop
end
end
......@@ -48,7 +48,7 @@ describe "Wildcard Properties" do
pub.callback do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad1 + '/' + channel_broad2 + '/' + channel_broad3).get :head => headers
sub_1.callback do |chunk|
sub_1.should be_http_status(403).without_body
expect(sub_1).to be_http_status(403).without_body
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad1 + '/' + channel_broad2).get :head => headers
sub_2.stream do
EventMachine.stop
......
......@@ -10,7 +10,7 @@ describe "Publisher Channel id collision" do
EventMachine.run do
pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel).post :body => 'x'
pub.callback do
pub.should be_http_status(200)
expect(pub).to be_http_status(200)
EventMachine.stop
end
end
......@@ -20,7 +20,7 @@ describe "Publisher Channel id collision" do
EventMachine.run do
pub = EventMachine::HttpRequest.new(nginx_address + '/channels-stats?id=' + channel).get :timeout => 30
pub.callback do
pub.should be_http_status(200)
expect(pub).to be_http_status(200)
EventMachine.stop
end
end
......
......@@ -18,7 +18,7 @@ describe "Publisher Publishing Messages" do
EventMachine.run do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub.stream do |chunk|
chunk.should eql(body)
expect(chunk).to eql(body)
EventMachine.stop
end
......@@ -35,7 +35,7 @@ describe "Publisher Publishing Messages" do
EventMachine.run do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub.stream do |chunk|
chunk.should eql(body)
expect(chunk).to eql(body)
EventMachine.stop
end
......@@ -69,7 +69,7 @@ describe "Publisher Publishing Messages" do
end
sub.callback do
response.bytes.to_a.should eql(body.bytes.to_a)
expect(response.bytes.to_a).to eql(body.bytes.to_a)
EventMachine.stop
end
......@@ -95,7 +95,7 @@ describe "Publisher Publishing Messages" do
response_sub += chunk
if response_sub.include?('A')
response_sub.should eql(large_message + 'A')
expect(response_sub).to eql(large_message + 'A')
response_sub = ''
# check if getting old messages works fine too
......@@ -104,7 +104,7 @@ describe "Publisher Publishing Messages" do
response_sub_1 += chunk_1
if response_sub_1.include?('A')
response_sub_1.should eql(large_message + 'A')
expect(response_sub_1).to eql(large_message + 'A')
response_sub_1 = ''
publish_message_inline(channel, headers, small_message + 'B')
......@@ -123,11 +123,11 @@ describe "Publisher Publishing Messages" do
EM.add_timer(3) do
if response_sub.include?('B') && response_sub_1.include?('B')
response_sub.should eql(small_message + 'B')
response_sub_1.should eql(small_message + 'B')
expect(response_sub).to eql(small_message + 'B')
expect(response_sub_1).to eql(small_message + 'B')
large_message.size.should eql(4194304) # 4mb
small_message.size.should eql(10204) # 10k
expect(large_message.size).to eql(4194304) # 4mb
expect(small_message.size).to eql(10204) # 10k
EventMachine.stop
end
end
......@@ -150,8 +150,8 @@ describe "Publisher Publishing Messages" do
response += chunk
end
pub.callback do
(Time.now - start).should be < 0.1 # should process the message quickly
response.strip.should eql('{"channel": "ch_test_publish_messages_with_template_patterns", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}')
expect(Time.now - start).to be < 0.1 # should process the message quickly
expect(response.strip).to eql('{"channel": "ch_test_publish_messages_with_template_patterns", "published_messages": "1", "stored_messages": "1", "subscribers": "0"}')
EventMachine.stop
end
end
......@@ -172,7 +172,7 @@ describe "Publisher Publishing Messages" do
recieved_messages = response.split("|")
if recieved_messages.length == messagens_to_publish
recieved_messages.last.should eql(body_prefix + messagens_to_publish.to_s)
expect(recieved_messages.last).to eql(body_prefix + messagens_to_publish.to_s)
EventMachine.stop
end
end
......@@ -202,10 +202,10 @@ describe "Publisher Publishing Messages" do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get
sub.stream do |chunk|
response = JSON.parse(chunk)
response["id"].to_i.should eql(1)
response["channel"].should eql(channel)
response["text"].should eql(body)
response["event_id"].should eql(event_id)
expect(response["id"].to_i).to eql(1)
expect(response["channel"]).to eql(channel)
expect(response["text"]).to eql(body)
expect(response["event_id"]).to eql(event_id)
EventMachine.stop
end
......@@ -225,10 +225,10 @@ describe "Publisher Publishing Messages" do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get
sub.stream do |chunk|
response = JSON.parse(chunk)
response["id"].to_i.should eql(1)
response["channel"].should eql(channel)
response["text"].should eql(body)
response["event_type"].should eql(event_type)
expect(response["id"].to_i).to eql(1)
expect(response["channel"]).to eql(channel)
expect(response["text"]).to eql(body)
expect(response["event_type"]).to eql(event_type)
EventMachine.stop
end
......@@ -248,10 +248,10 @@ describe "Publisher Publishing Messages" do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get
sub.stream do |chunk|
response = JSON.parse(chunk)
response["id"].to_i.should eql(1)
response["channel"].should eql(channel)
response["text"].should eql(body)
response["event_id"].should eql("")
expect(response["id"].to_i).to eql(1)
expect(response["channel"]).to eql(channel)
expect(response["text"]).to eql(body)
expect(response["event_id"]).to eql("")
EventMachine.stop
end
......@@ -262,10 +262,10 @@ describe "Publisher Publishing Messages" do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get
sub.stream do |chunk|
response = JSON.parse(chunk)
response["id"].to_i.should eql(2)
response["channel"].should eql(channel)
response["text"].should eql(body)
response["event_id"].should eql("")
expect(response["id"].to_i).to eql(2)
expect(response["channel"]).to eql(channel)
expect(response["text"]).to eql(body)
expect(response["event_id"]).to eql("")
EventMachine.stop
end
......@@ -285,12 +285,12 @@ describe "Publisher Publishing Messages" do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get
sub.stream do |chunk|
response = JSON.parse(chunk)
response["id"].to_i.should eql(1)
response["channel"].should eql(channel)
response["text"].should eql(body)
response["publish_time"].size.should eql(29)
expect(response["id"].to_i).to eql(1)
expect(response["channel"]).to eql(channel)
expect(response["text"]).to eql(body)
expect(response["publish_time"].size).to eql(29)
publish_time = Time.parse(response["publish_time"])
publish_time.to_i.should be_in_the_interval(now.to_i, now.to_i + 1)
expect(publish_time.to_i).to be_in_the_interval(now.to_i, now.to_i + 1)
EventMachine.stop
end
......@@ -315,10 +315,10 @@ describe "Publisher Publishing Messages" do
if lines.size > 1
lines.each_with_index do |line, i|
resp = JSON.parse(line)
resp["id"].to_i.should eql(i + 1)
resp["channel"].should eql(channel)
resp["text"].should eql(body)
resp["tag"].to_i.should eql(i)
expect(resp["id"].to_i).to eql(i + 1)
expect(resp["channel"]).to eql(channel)
expect(resp["text"]).to eql(body)
expect(resp["tag"].to_i).to eql(i)
end
end
......
......@@ -23,7 +23,7 @@ def publish_message_inline(channel, headers, body, delay=0.01, &block)
EM.add_timer(delay) do
pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s).post :head => headers, :body => body
pub.callback do
pub.should be_http_status(200)
expect(pub).to be_http_status(200)
block.call(pub) unless block.nil?
end
end
......@@ -39,7 +39,7 @@ def publish_message(channel, headers, body)
content = Zlib::GzipReader.new(StringIO.new(content)).read
end
response = JSON.parse(content)
response["channel"].to_s.should eql(channel)
expect(response["channel"].to_s).to eql(channel)
end
def post_to(path, headers, body)
......
......@@ -18,7 +18,7 @@ describe "Comunication Properties" do
EventMachine.run do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub.stream do |chunk|
chunk.should eql(conf.header_template)
expect(chunk).to eql(conf.header_template)
EventMachine.stop
end
end
......@@ -33,14 +33,14 @@ describe "Comunication Properties" do
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_1.callback do |chunk|
sub_1.should be_http_status(403).without_body
sub_1.response_header['X_NGINX_PUSHSTREAM_EXPLAIN'].should eql("Subscriber could not create channels.")
expect(sub_1).to be_http_status(403).without_body
expect(sub_1.response_header['X_NGINX_PUSHSTREAM_EXPLAIN']).to eql("Subscriber could not create channels.")
pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s ).post :head => headers, :body => body
pub.callback do
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_2.stream do |chunk2|
chunk2.should eql(conf.header_template)
expect(chunk2).to eql(conf.header_template)
EventMachine.stop
end
end
......@@ -84,9 +84,9 @@ describe "Comunication Properties" do
end
EM.add_timer(17) do
response_1.should eql("#{conf.header_template}#{body}")
response_2.should eql("#{conf.header_template}#{body}")
response_3.should eql("#{conf.header_template}")
expect(response_1).to eql("#{conf.header_template}#{body}")
expect(response_2).to eql("#{conf.header_template}#{body}")
expect(response_3).to eql("#{conf.header_template}")
EventMachine.stop
end
end
......@@ -109,9 +109,9 @@ describe "Comunication Properties" do
lines = response.split("|")
if lines.length >= 3
lines[0].should eql("#{conf.header_template}")
lines[1].should eql("{\"duplicated\":\"#{channel}\", \"channel\":\"#{channel}\", \"message\":\"#{body}\", \"message_id\":\"1\"}")
lines[2].should eql("{\"duplicated\":\"\", \"channel\":\"\", \"message\":\" \", \"message_id\":\"-1\"}")
expect(lines[0]).to eql("#{conf.header_template}")
expect(lines[1]).to eql("{\"duplicated\":\"#{channel}\", \"channel\":\"#{channel}\", \"message\":\"#{body}\", \"message_id\":\"1\"}")
expect(lines[2]).to eql("{\"duplicated\":\"\", \"channel\":\"\", \"message\":\" \", \"message_id\":\"-1\"}")
EventMachine.stop
end
end
......@@ -135,9 +135,9 @@ describe "Comunication Properties" do
lines = response.split("|")
if lines.length >= 3
lines[0].should eql("#{conf.header_template}")
lines[1].should eql("{\"channel\":\"ch_test_message_and_channel_with_same_pattern_of_the_template~channel~~channel~~channel~~text~~text~~text~\", \"message\":\"~channel~~channel~~channel~~text~~text~~text~\", \"message_id\":\"1\"}")
lines[2].should eql("{\"channel\":\"\", \"message\":\" \", \"message_id\":\"-1\"}")
expect(lines[0]).to eql("#{conf.header_template}")
expect(lines[1]).to eql("{\"channel\":\"ch_test_message_and_channel_with_same_pattern_of_the_template~channel~~channel~~channel~~text~~text~~text~\", \"message\":\"~channel~~channel~~channel~~text~~text~~text~\", \"message_id\":\"1\"}")
expect(lines[2]).to eql("{\"channel\":\"\", \"message\":\" \", \"message_id\":\"-1\"}")
EventMachine.stop
end
end
......
......@@ -22,13 +22,13 @@ describe "Subscriber Connection Cleanup" do
sub.stream do |chunk|
response += chunk
response.should include(conf.header_template)
expect(response).to include(conf.header_template)
end
sub.callback do
stop = Time.now
time_diff_sec(start, stop).should be_in_the_interval(17, 17.5)
response.should include(conf.footer_template)
expect(time_diff_sec(start, stop)).to be_in_the_interval(17, 17.5)
expect(response).to include(conf.footer_template)
EventMachine.stop
end
end
......@@ -51,8 +51,8 @@ describe "Subscriber Connection Cleanup" do
sub.callback do
stop = Time.now
time_diff_sec(start, stop).should be_in_the_interval(17, 17.5)
chunks_received.should be_eql(5)
expect(time_diff_sec(start, stop)).to be_in_the_interval(17, 17.5)
expect(chunks_received).to be_eql(5)
EventMachine.stop
end
end
......@@ -68,10 +68,10 @@ describe "Subscriber Connection Cleanup" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_1.stream do |chunk|
response_1 += chunk
response_1.should include(conf.header_template)
expect(response_1).to include(conf.header_template)
end
sub_1.callback do
response_1.should include(conf.footer_template)
expect(response_1).to include(conf.footer_template)
end
sleep(2)
......@@ -80,19 +80,19 @@ describe "Subscriber Connection Cleanup" do
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_2.stream do |chunk|
response_2 += chunk
response_2.should include(conf.header_template)
expect(response_2).to include(conf.header_template)
end
sub_2.callback do
response_2.should include(conf.footer_template)
expect(response_2).to include(conf.footer_template)
response_4 = ''
sub_4 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_4.stream do |chunk|
response_4 += chunk
response_4.should include(conf.header_template)
expect(response_4).to include(conf.header_template)
end
sub_4.callback do
response_4.should include(conf.footer_template)
expect(response_4).to include(conf.footer_template)
EventMachine.stop
end
end
......@@ -103,10 +103,10 @@ describe "Subscriber Connection Cleanup" do
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_3.stream do |chunk|
response_3 += chunk
response_3.should include(conf.header_template)
expect(response_3).to include(conf.header_template)
end
sub_3.callback do
response_3.should include(conf.footer_template)
expect(response_3).to include(conf.footer_template)
end
end
......
......@@ -22,19 +22,19 @@ describe "Subscriber Padding by user agent" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(1100 + expected_size)
sub_1.response.should match padding_pattern
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(1100 + expected_size)
expect(sub_1.response).to match padding_pattern
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 2")
sub_2.callback do
sub_2.should be_http_status(200)
sub_2.response.size.should eql(4097 + expected_size)
expect(sub_2).to be_http_status(200)
expect(sub_2.response.size).to eql(4097 + expected_size)
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 3")
sub_3.callback do
sub_3.should be_http_status(200)
sub_3.response.size.should eql(expected_size)
expect(sub_3).to be_http_status(200)
expect(sub_3.response.size).to eql(expected_size)
EventMachine.stop
end
......@@ -55,19 +55,19 @@ describe "Subscriber Padding by user agent" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback {
sub_1.should be_http_status(200)
sub_1.response.size.should eql(500 + expected_size)
sub_1.response.should match padding_pattern
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(500 + expected_size)
expect(sub_1.response).to match padding_pattern
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 2")
sub_2.callback {
sub_2.should be_http_status(200)
sub_2.response.size.should eql(expected_size)
expect(sub_2).to be_http_status(200)
expect(sub_2.response.size).to eql(expected_size)
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 3")
sub_3.callback {
sub_3.should be_http_status(200)
sub_3.response.size.should eql(expected_size)
expect(sub_3).to be_http_status(200)
expect(sub_3.response.size).to eql(expected_size)
EventMachine.stop
}
......@@ -91,56 +91,56 @@ describe "Subscriber Padding by user agent" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
sub_1.response.should match padding_pattern
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
expect(sub_1.response).to match padding_pattern
i = 105
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
i = 221
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
i = 331
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
i = 435
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
i = 502
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(expected_padding + i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(expected_padding + i + expected_size)
i = 550
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1")
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(i + expected_size)
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(i + expected_size)
EventMachine.stop
end
......@@ -170,14 +170,14 @@ describe "Subscriber Padding by user agent" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?ua=test 1').get :head => headers
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response.size.should eql(1024 + expected_size)
sub_1.response.should match padding_pattern
expect(sub_1).to be_http_status(200)
expect(sub_1.response.size).to eql(1024 + expected_size)
expect(sub_1.response).to match padding_pattern
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?ua=test 2').get :head => headers
sub_2.callback do
sub_2.should be_http_status(200)
sub_2.response.size.should eql(expected_size)
expect(sub_2).to be_http_status(200)
expect(sub_2.response.size).to eql(expected_size)
EventMachine.stop
end
......
......@@ -13,9 +13,9 @@ describe "Subscriber Properties" do
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_1.callback do
sub_1.should be_http_status(304).without_body
sub_1.response_header['LAST_MODIFIED'].to_s.should eql("")
sub_1.response_header['ETAG'].to_s.should eql("")
expect(sub_1).to be_http_status(304).without_body
expect(sub_1.response_header['LAST_MODIFIED'].to_s).to eql("")
expect(sub_1.response_header['ETAG'].to_s).to eql("")
EventMachine.stop
end
end
......@@ -30,9 +30,9 @@ describe "Subscriber Properties" do
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => sent_headers
sub_1.callback do
sub_1.should be_http_status(304).without_body
Time.parse(sub_1.response_header['LAST_MODIFIED'].to_s).should eql(Time.parse(sent_headers['If-Modified-Since']))
sub_1.response_header['ETAG'].to_s.should eql(sent_headers['If-None-Match'])
expect(sub_1).to be_http_status(304).without_body
expect(Time.parse(sub_1.response_header['LAST_MODIFIED'].to_s)).to eql(Time.parse(sent_headers['If-Modified-Since']))
expect(sub_1.response_header['ETAG'].to_s).to eql(sent_headers['If-None-Match'])
EventMachine.stop
end
end
......@@ -53,10 +53,10 @@ describe "Subscriber Properties" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge({'If-Modified-Since' => Time.at(0).utc.strftime("%a, %d %b %Y %T %Z")})
sub_1.callback do
sub_1.should be_http_status(200)
sub_1.response_header['LAST_MODIFIED'].to_s.should_not eql("")
sub_1.response_header['ETAG'].to_s.should eql("1")
sub_1.response.should eql("#{body}")
expect(sub_1).to be_http_status(200)
expect(sub_1.response_header['LAST_MODIFIED'].to_s).not_to eql("")
expect(sub_1.response_header['ETAG'].to_s).to eql("1")
expect(sub_1.response).to eql("#{body}")
EventMachine.stop
end
end
......@@ -74,7 +74,7 @@ describe "Subscriber Properties" do
publish_message(channel, {}, body)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?callback=' + callback_function_name).get :head => headers.merge({'If-Modified-Since' => Time.at(0).utc.strftime("%a, %d %b %Y %T %Z")})
sub_1.callback do
sub_1.response.should eql("#{callback_function_name}([#{body}]);")
expect(sub_1.response).to eql("#{callback_function_name}([#{body}]);")
EventMachine.stop
end
end
......@@ -94,15 +94,15 @@ describe "Subscriber Properties" do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b2' + '?callback=' + callback_function_name).get :head => headers
sub_1.callback do
sub_1.response.should eql("#{callback_function_name}([#{body},#{body + "1"}]);")
expect(sub_1.response).to eql("#{callback_function_name}([#{body},#{body + "1"}]);")
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?callback=' + callback_function_name).get :head => headers.merge({'Last-Event-Id' => 'event_id'})
sub_2.callback do
sub_2.response.should eql("#{callback_function_name}([#{body + "1"}]);")
expect(sub_2.response).to eql("#{callback_function_name}([#{body + "1"}]);")
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?callback=' + callback_function_name).get :head => headers.merge({'If-Modified-Since' => Time.at(0).utc.strftime("%a, %d %b %Y %T %Z")})
sub_3.callback do
sub_3.response.should eql("#{callback_function_name}([#{body},#{body + "1"}]);")
expect(sub_3.response).to eql("#{callback_function_name}([#{body},#{body + "1"}]);")
EventMachine.stop
end
......@@ -124,7 +124,7 @@ describe "Subscriber Properties" do
sent_headers = headers.merge({'accept' => 'otherknown/value'})
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?callback=' + callback_function_name).get :head => sent_headers
sub_1.callback do
sub_1.response_header['CONTENT_TYPE'].should eql('application/javascript')
expect(sub_1.response_header['CONTENT_TYPE']).to eql('application/javascript')
EventMachine.stop
end
end
......@@ -146,12 +146,12 @@ describe "Subscriber Properties" do
actual_response << chunk
end
sub_1.callback do
sub_1.should be_http_status(200)
expect(sub_1).to be_http_status(200)
sub_1.response_header["CONTENT_ENCODING"].should eql("gzip")
expect(sub_1.response_header["CONTENT_ENCODING"]).to eql("gzip")
actual_response = Zlib::GzipReader.new(StringIO.new(actual_response)).read
actual_response.should eql("#{body}")
expect(actual_response).to eql("#{body}")
EventMachine.stop
end
end
......@@ -166,8 +166,8 @@ describe "Subscriber Properties" do
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_1.callback do
sub_1.response_header["EXPIRES"].should eql("Thu, 01 Jan 1970 00:00:01 GMT")
sub_1.response_header["CACHE_CONTROL"].should eql("no-cache, no-store, must-revalidate")
expect(sub_1.response_header["EXPIRES"]).to eql("Thu, 01 Jan 1970 00:00:01 GMT")
expect(sub_1.response_header["CACHE_CONTROL"]).to eql("no-cache, no-store, must-revalidate")
EventMachine.stop
end
end
......