Commit 029daba7 authored by Wandenberg Peixoto's avatar Wandenberg Peixoto

refactoring tests to use nginx_test_helper and rspec gems

parent 67935a78
......@@ -3,49 +3,27 @@ h1(#tests). Tests <a name="tests" href="#">&nbsp;</a>
The tests for this module are written in Ruby, and are acceptance tests.
To run them, you need an environment with:
* Basic requirements
** ruby >= 1.8.7
** rubygems >= 1.6.2
** rake >= 0.8.7
* Required gems
** POpen4 >= 0.1.4
** em-http-request >= 0.2.14
** json >= 1.4.3
** ruby-debug >= 0.10.4
** jasmine >= 1.0.2.1
** nokogiri >= 1.5.0
** jshint >= 0.1.1
You can install these gems with bundler (bundler is required to be installed before, _gem install bundler_)
* ruby >= 1.9.3
* bundler >= 1.1.4
<pre>
cd test/
bundle install --without docs
</pre>
or individually
and install required gems doing:
<pre>
gem install POpen4 -v 0.1.4
gem install em-http-request -v 0.2.14
gem install json -v 1.4.3
gem install ruby-debug -v 0.10.4
gem install jasmine -v 1.0.2.1
gem install nokogiri -v 1.5.0
gem install jshint -v 0.1.1
cd misc/
bundle install --without docs
</pre>
Then issue @rake tests@.
This command runs the tests using nginx *executable* located at _/usr/local/nginx/sbin/nginx_ with _1_ *worker* responding at *host* _localhost_ and *port* _9990_.
Then issue @rake spec@.
This command runs the tests using nginx *executable* located at _/usr/local/nginx/sbin/nginx_ with _1_ *worker* responding at *host* _127.0.0.1_ and *port* _9990_.
To change this behavior use the commands below
<pre>
rake tests executable="../build/nginx-1.1.15/objs/nginx" # to change default path for nginx executable
rake tests host=my_machine # to change default hostname
rake tests port=9889 # to change default port
rake tests workers=2 # to change default number of workers used
NGINX_EXEC="../build/nginx-1.1.15/objs/nginx" rake spec # to change default path for nginx executable
NGINX_HOST="my_machine" rake spec # to change default hostname
NGINX_PORT=9889 rake spec # to change default port
NGINX_WORKERS=2 rake spec # to change default number of workers used
and can combine any of these parameters, like:
rake tests port=9889 executable="../build/nginx-1.1.15/objs/nginx"
NGINX_PORT=9889 NGINX_EXEC="../build/nginx-1.1.15/objs/nginx" rake spec
</pre>
rvm ruby-1.9.3-p194@nginx-push-stream-module --create
# Gemfile for the test and documentation tooling of this nginx module.
# Install with `bundle install` (use `--without docs` to skip doc gems).
source "https://rubygems.org"

gem 'rake', '~> 10.0.3'

# Gems used only by the acceptance (rspec) and javascript (jasmine/jshint)
# test suites.
group :test do
  gem 'rspec', '~> 2.12.0'
  gem 'em-http-request', '~> 1.0.3'
  gem 'nginx_test_helper', '~> 0.0.1'
  gem 'jshintrb', '~> 0.2.1'
  gem 'therubyracer', '~> 0.11.3'   # JS runtime backing execjs for jshintrb
  gem 'jasmine', '~> 1.3.1'
  gem 'listen', '~> 0.7.2'          # watches misc/js for changes during jasmine runs
  gem 'rb-inotify', '~> 0.8.8'
  gem 'json', '~> 1.7.6'
  gem 'debugger', '~> 1.3.1'
end

# Gems used only to render the documentation preview / wiki conversion tasks.
group :docs do
  gem 'github-markup', '~> 0.7.5', :require => 'github/markup'
  gem 'RedCloth', '~> 4.2.9'        # textile -> HTML renderer
  gem 'nokogiri', '~> 1.5.6'        # HTML parsing for wiki-syntax conversion
end
GEM
remote: https://rubygems.org/
specs:
Platform (0.4.0)
RedCloth (4.2.9)
addressable (2.3.2)
childprocess (0.3.8)
ffi (~> 1.0, >= 1.0.11)
columnize (0.3.6)
cookiejar (0.3.0)
debugger (1.3.1)
columnize (>= 0.3.1)
debugger-linecache (~> 1.1.1)
debugger-ruby_core_source (~> 1.1.8)
debugger-linecache (1.1.2)
debugger-ruby_core_source (>= 1.1.1)
debugger-ruby_core_source (1.1.8)
diff-lcs (1.1.3)
em-http-request (1.0.3)
addressable (>= 2.2.3)
cookiejar
em-socksify
eventmachine (>= 1.0.0.beta.4)
http_parser.rb (>= 0.5.3)
em-socksify (0.2.1)
eventmachine (>= 1.0.0.beta.4)
eventmachine (1.0.0)
execjs (1.4.0)
multi_json (~> 1.0)
ffi (1.3.1)
github-markup (0.7.5)
http_parser.rb (0.5.3)
jasmine (1.3.1)
jasmine-core (~> 1.3.1)
rack (~> 1.0)
rspec (>= 1.3.1)
selenium-webdriver (>= 0.1.3)
jasmine-core (1.3.1)
jshintrb (0.2.1)
execjs
multi_json (>= 1.3)
rake
json (1.7.6)
libv8 (3.11.8.13)
listen (0.7.2)
multi_json (1.5.0)
nginx_test_helper (0.0.1)
popen4
nokogiri (1.5.6)
open4 (1.3.0)
popen4 (0.1.2)
Platform (>= 0.4.0)
open4 (>= 0.4.0)
rack (1.5.2)
rake (10.0.3)
rb-inotify (0.8.8)
ffi (>= 0.5.0)
ref (1.0.2)
rspec (2.12.0)
rspec-core (~> 2.12.0)
rspec-expectations (~> 2.12.0)
rspec-mocks (~> 2.12.0)
rspec-core (2.12.2)
rspec-expectations (2.12.1)
diff-lcs (~> 1.1.3)
rspec-mocks (2.12.2)
rubyzip (0.9.9)
selenium-webdriver (2.29.0)
childprocess (>= 0.2.5)
multi_json (~> 1.0)
rubyzip
websocket (~> 1.0.4)
therubyracer (0.11.3)
libv8 (~> 3.11.8.12)
ref
websocket (1.0.7)
PLATFORMS
ruby
DEPENDENCIES
RedCloth (~> 4.2.9)
debugger (~> 1.3.1)
em-http-request (~> 1.0.3)
github-markup (~> 0.7.5)
jasmine (~> 1.3.1)
jshintrb (~> 0.2.1)
json (~> 1.7.6)
listen (~> 0.7.2)
nginx_test_helper (~> 0.0.1)
nokogiri (~> 1.5.6)
rake (~> 10.0.3)
rb-inotify (~> 0.8.8)
rspec (~> 2.12.0)
therubyracer (~> 0.11.3)
base_dir = File.expand_path('..', File.dirname(__FILE__))
project_dir = File.expand_path('..', File.dirname(__FILE__))
javascript_dir = File.expand_path(Dir["#{project_dir}/**/js"].first)
require 'rubygems'
# Set up gems listed in the Gemfile.
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('./Gemfile', File.dirname(__FILE__))
require 'bundler/setup' if File.exists?(ENV['BUNDLE_GEMFILE'])
Bundler.require(:default, :test) if defined?(Bundler)
begin
require "rspec/core/rake_task"
desc "Run all examples"
RSpec::Core::RakeTask.new(:spec) do |t|
t.rspec_opts = %w[--color --debug]
t.pattern = '**/*_spec.rb'
end
rescue LoadError
task :spec do
abort "RSpec is not available. In order to run rspec, you must: (sudo) gem install rspec"
end
end
begin
load 'jasmine/tasks/jasmine.rake'
task "jasmine:require" => ["jshint", "configure_jasmine", "monitor_js"]
task :configure_jasmine do
Jasmine.configure do |config|
config.spec_dir = project_dir
config.spec_files = lambda { Dir["#{project_dir}/misc/spec/javascripts/helpers/**/*.js"] + Dir["#{project_dir}/misc/**/*[sS]pec.js"] }
js_tmp_dir = File.expand_path('pushstream/js', Dir.tmpdir)
config.src_dir = js_tmp_dir
config.src_files = lambda { Dir["#{js_tmp_dir}/**/*.js"] }
end
end
task :monitor_js do
copy_inner_js = Proc.new do |modified, added, removed|
modified.each do |file|
destiny_path = File.dirname(file).gsub(/.*\/js\/?/, File.expand_path('pushstream/js', Dir.tmpdir))
FileUtils.mkdir_p(destiny_path)
content = File.read file
content.gsub!('(function (window, document, undefined) {', '')
content.gsub!('if (window.PushStream) { return; }', '')
content.gsub!('})(window, document);', '')
File.open(File.join(destiny_path, File.basename(file)), 'w') {|f| f.write content }
end
end
copy_inner_js.call([[File.expand_path('misc/js/pushstream.js', project_dir)], [], []])
listener = Listen.to(File.expand_path('misc/js', project_dir), :filter => /\.js$/)
listener.change(&copy_inner_js)
listener.start(false)
end
rescue LoadError
desc "Run javascript tests"
task :jasmine do
abort "Jasmine is not available. In order to run jasmine, you must: (sudo) gem install jasmine"
end
end
begin
require "jshintrb/jshinttask"
Jshintrb::JshintTask.new :jshint do |t|
t.pattern = "#{javascript_dir}/pushstream.js"
t.options = :defaults
end
rescue LoadError
desc "Run jshint on js"
task :jshint do
abort "Jshintrb is not available. In order to run jshint, you must: (sudo) gem install jshintrb"
end
end
namespace :docs do
begin
Bundler.require(:default, :docs) if defined?(Bundler)
task :get_static_files do
base_path = File.expand_path('pushstream/docs/preview', Dir.tmpdir)
FileUtils.mkdir_p("#{base_path}/css")
download_file("https://a248.e.akamai.net/assets.github.com/assets/github-00f65189aa131d891441bde58bf26d999ab4d29c.css", "#{base_path}/css/github.css") unless File.exists?("#{base_path}/css/github.css")
download_file("https://a248.e.akamai.net/assets.github.com/assets/github2-90ff47ca514fab587fdf9f40626c56af77a8fa6b.css", "#{base_path}/css/github2.css") unless File.exists?("#{base_path}/css/github2.css")
end
desc "Generates docs files to preview."
task :generate do
task :generate => :get_static_files do
require 'erb'
require 'github/markup'
template = ERB.new File.read("#{base_dir}/misc/github_template.html.erb")
template = ERB.new File.read("#{project_dir}/misc/github_template.html.erb")
base_path = File.expand_path('pushstream/docs/preview', Dir.tmpdir)
Dir.glob("#{base_dir}/**/*.textile").each do |file|
Dir.glob("#{project_dir}/**/*.textile").each do |file|
filename = File.basename(file)
content = GitHub::Markup.render(file, File.read(file))
rendered = template.result(binding)
output = file.gsub(base_dir, "#{base_dir}/misc").gsub(".textile", ".html")
output = file.gsub(project_dir, File.expand_path('pushstream/docs/preview', Dir.tmpdir)).gsub(".textile", ".html")
output_dir = File.dirname(output)
FileUtils.mkdir_p(output_dir) unless File.exists?(output_dir)
FileUtils.mkdir_p(output_dir)
File.open(output, 'w') {|f| f.write(rendered) }
puts "Preview rendered to #{output}"
end
......@@ -22,22 +108,42 @@ namespace :docs do
desc "Convert docs to Nginx wiki format."
task :convert_to_wiki do
require 'redcloth'
require 'nokogiri'
Dir.glob("#{base_dir}/**/*.textile").each do |file|
Dir.glob("#{project_dir}/**/*.textile").each do |file|
filename = File.basename(file)
content = File.read(file)
output = file.gsub(base_dir, "#{base_dir}/misc").gsub(".textile", ".html")
output_wiki = file.gsub(base_dir, "#{base_dir}/misc").gsub(".textile", ".wiki")
output_dir = File.dirname(output)
FileUtils.mkdir_p(output_dir) unless File.exists?(output_dir)
output = file.gsub(project_dir, File.expand_path('pushstream/docs/html', Dir.tmpdir)).gsub(".textile", ".html")
output_wiki = file.gsub(project_dir, File.expand_path('pushstream/docs/wiki', Dir.tmpdir)).gsub(".textile", ".wiki")
FileUtils.mkdir_p(File.dirname(output))
FileUtils.mkdir_p(File.dirname(output_wiki))
File.open(output, 'w') {|f| f.write(RedCloth.new(content).to_html) }
File.open(output_wiki, 'w') {|f| f.write(convert_to_wiki_syntax(content)) }
puts "Wiki converted to #{output_wiki}"
end
end
rescue LoadError
desc "Generates docs files to preview."
task :generate do
abort "github-markup is not available. In order to run docs:generate, you must: (sudo) gem install github-markup"
end
desc "Convert docs to Nginx wiki format."
task :convert_to_wiki do
abort "RedCloth or nokogiri is not available. In order to run docs:convert_to_wiki, you must: (sudo) gem install RedCloth nokogiri"
end
end
# Downloads +url+ to +output_file+ using EventMachine's HTTP client.
# Blocks until the EventMachine loop stops. The file is written only when
# the response status is 200; on connection error (errback) the loop is
# stopped and the failure is silently ignored — callers get no error signal.
def download_file(url, output_file)
  EventMachine.run do
    http = EventMachine::HttpRequest.new(url).get
    http.errback { EM.stop }
    http.callback do
      # Write the body only on success; either way stop the reactor so the
      # method returns.
      File.open(output_file, "w") { |f| f.write(http.response) } if (http.response_header.status == 200)
      EM.stop
    end
  end
end
def convert_to_wiki_syntax(text)
doc = Nokogiri::HTML(RedCloth.new(text).to_html)
......@@ -99,65 +205,4 @@ namespace :docs do
"<#{tag}>#{text}</#{tag}>"
end
end
end
desc "Run all tests."
task :tests, :executable, :host, :port, :workers, :tests_tmp_dir do |t, args|
ENV['NGINX_EXEC'] ||= args[:executable] || nil
ENV['NGINX_HOST'] ||= args[:host] || nil
ENV['NGINX_PORT'] ||= args[:port] || nil
ENV['NGINX_WORKERS'] ||= args[:workers] || nil
ENV['NGINX_TESTS_TMP_DIR'] ||= args[:tests_tmp_dir] || nil
require 'test/unit'
Dir.glob('test_*.rb').each do|f|
test_case = "#{base_dir}/test/#{f}".gsub('.rb', '')
require test_case
end
end
begin
require 'listen'
require 'jasmine'
load 'jasmine/tasks/jasmine.rake'
task "jasmine:require" => "monitor_js"
task :monitor_js do
copy_inner_js = Proc.new do |modified, added, removed|
modified.each do |file|
destiny_path = File.dirname(file).gsub('misc/js', 'test/tmp/js')
FileUtils.mkdir_p(destiny_path)
content = File.read file
content.gsub!('(function (window, document, undefined) {', '')
content.gsub!('if (window.PushStream) { return; }', '')
content.gsub!('})(window, document);', '')
File.open(File.join(destiny_path, File.basename(file)), 'w') {|f| f.write content }
end
end
copy_inner_js.call([File.expand_path('misc/js/pushstream.js', base_dir)])
listener = Listen.to(File.expand_path('misc/js', base_dir), :filter => /\.js$/)
listener.change(&copy_inner_js)
listener.start(false)
end
rescue LoadError
task :jasmine do
abort "Jasmine is not available. In order to run jasmine, you must: (sudo) gem install jasmine"
end
end
begin
require "jshintrb/jshinttask"
Jshintrb::JshintTask.new :jshint do |t|
t.pattern = '../misc/js/pushstream.js'
t.options = :defaults
end
rescue LoadError
task :jshint do
abort "Jshintrb is not available. In order to run jshint, you must: (sudo) gem install jshintrb"
end
end
......@@ -4,11 +4,10 @@
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta charset="utf-8">
<title><%= filename %> - Preview to GitHub</title>
<base href="file://<%= base_path %>/" />
<link href="https://a248.e.akamai.net/assets.github.com/stylesheets/bundles/github-fe72294c0899f9951f2200f65bc4cf1f7880f77d.css" media="screen" rel="stylesheet" type="text/css" />
<link href="https://a248.e.akamai.net/assets.github.com/stylesheets/bundles/github2-cb54f8b424bab1addc65760cf760d406bcc92480.css" media="screen" rel="stylesheet" type="text/css" />
<script src="https://a248.e.akamai.net/assets.github.com/javascripts/bundles/jquery-1ff4761a0d9866a948321eac8d969db3dc12938e.js" type="text/javascript"></script>
<script src="https://a248.e.akamai.net/assets.github.com/javascripts/bundles/github-18a50b3d097d5e6f5686d2be4c43a5fc49547aa5.js" type="text/javascript"></script>
<link href="css/github.css" media="screen" rel="stylesheet" type="text/css" />
<link href="css/github2.css" media="screen" rel="stylesheet" type="text/css" />
<style>
#preview-content .markdown-body, #preview-content .plain {
background-color: #FFFFFF;
......
......@@ -35,12 +35,12 @@
var valueToTwoDigits = function (value) {
return ((value < 10) ? '0' : '') + value;
}
};
var dateToUTCString = function (date) {
var time = valueToTwoDigits(date.getUTCHours()) + ':' + valueToTwoDigits(date.getUTCMinutes()) + ':' + valueToTwoDigits(date.getUTCSeconds());
return days[date.getUTCDay()] + ', ' + valueToTwoDigits(date.getUTCDate()) + ' ' + months[date.getUTCMonth()] + ' ' + date.getUTCFullYear() + ' ' + time + ' GMT';
}
};
var extend = function () {
var object = arguments[0] || {};
......
require 'spec_helper'

# Acceptance tests for broadcast channel handling: channels whose id starts
# with the configured prefix are treated as broadcast channels, and the
# number of broadcast channels allowed in one subscribe request is limited.
describe "Broadcast Properties" do
  # Base nginx config: only already-created channels may be subscribed to,
  # and broadcast channels are recognized by the "XXX_" prefix.
  let(:config) do
    {
      :authorized_channels_only => "on",
      :header_template => 'connected',
      :broadcast_channel_prefix => "XXX_"
    }
  end

  it "should identify broadcast channels by prefix" do
    channel = 'ch_test_broadcast_channel_prefix'
    channel_broad = 'XXX_123'       # matches the configured prefix
    channel_broad_fail = 'YYY_123'  # wrong prefix -> treated as a normal (unauthorized) channel
    body = 'broadcast channel prefix'

    nginx_run_server(config, :timeout => 5) do |conf|
      EventMachine.run do
        # Create the normal channel first by publishing to it.
        pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s ).post :head => headers, :body => body, :timeout => 30
        pub.callback do
          # Subscribing with a non-prefixed extra channel must be rejected (403).
          sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad_fail).get :head => headers, :timeout => 60
          sub_1.callback do |chunk|
            sub_1.response_header.status.should eql(403)
            # Subscribing with a properly prefixed broadcast channel succeeds
            # and streams the header template.
            sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad).get :head => headers, :timeout => 60
            sub_2.stream do |chunk2|
              chunk2.should eql("#{conf.header_template}\r\n")
              EventMachine.stop
            end
          end
        end
      end
    end
  end

  it "should limit the number of broadcast channels in the same request" do
    channel = 'ch_test_broadcast_channel_max_qtd'
    channel_broad1 = 'XXX_123'
    channel_broad2 = 'XXX_321'
    channel_broad3 = 'XXX_213'
    body = 'broadcast channel prefix'

    # Allow at most 2 broadcast channels per subscribe request.
    nginx_run_server(config.merge(:broadcast_channel_max_qtd => 2), :timeout => 5) do |conf|
      EventMachine.run do
        pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s ).post :head => headers, :body => body, :timeout => 30
        pub.callback do
          # Three broadcast channels exceeds the limit -> 403.
          sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad1 + '/' + channel_broad2 + '/' + channel_broad3).get :head => headers, :timeout => 60
          sub_1.callback do |chunk|
            sub_1.response_header.status.should eql(403)
            # Exactly the limit (2) is accepted and the stream opens.
            sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '/' + channel_broad1 + '/' + channel_broad2).get :head => headers, :timeout => 60
            sub_2.stream do
              EventMachine.stop
            end
          end
        end
      end
    end
  end
end
This diff is collapsed.
This diff is collapsed.
require 'spec_helper'

# Acceptance tests for HTTP keepalive support: many operations must work
# over a single persistent socket.
describe "Keepalive" do
  # Keepalive enabled; empty header/footer templates and a raw message
  # template keep responses minimal and easy to assert on.
  let(:config) do
    {
      :shared_memory_size => '256m',
      :keepalive => "on",
      :header_template => '',
      :message_template => '~text~',
      :footer_template => ''
    }
  end

  it "should create many channels on the same socket" do
    channel = 'ch_test_create_many_channels_'
    body = 'channel started'
    channels_to_be_created = 4000

    nginx_run_server(config, :timeout => 25) do |conf|
      # Create 4000 channels in batches of 500 publishes per socket.
      0.step(channels_to_be_created - 1, 500) do |i|
        socket = open_socket(nginx_host, nginx_port)
        1.upto(500) do |j|
          # NOTE: post_in_socket reassigns `body` to the response body here;
          # each publish must come back 200 OK on the same connection.
          headers, body = post_in_socket("/pub?id=#{channel}#{i + j}", body, socket, "}\r\n")
          headers.should include("HTTP/1.1 200 OK")
        end
        socket.close
      end
    end
  end

  it "should execute different operations using the same socket" do
    channel = 'ch_test_different_operation_with_keepalive'
    content = 'message to be sent'

    nginx_run_server(config, :timeout => 5) do |conf|
      socket = open_socket(nginx_host, nginx_port)
      # 1) GET publisher endpoint without a channel id -> error, empty body.
      headers, body = get_in_socket("/pub", socket)
      body.should eql("")
      headers.should include("No channel id provided.")

      # 2) Publish a message on the same socket.
      headers, body = post_in_socket("/pub?id=#{channel}", content, socket, "}\r\n")
      body.should eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")

      # 3) Fetch global statistics on the same socket.
      headers, body = get_in_socket("/channels-stats", socket)
      body.should match_the_pattern(/"channels": "1", "broadcast_channels": "0", "published_messages": "1", "subscribers": "0", "uptime": "[0-9]*", "by_worker": \[\r\n/)
      body.should match_the_pattern(/\{"pid": "[0-9]*", "subscribers": "0", "uptime": "[0-9]*"\}/)

      # 4) Fetch single-channel statistics on the same socket.
      headers, body = get_in_socket("/pub?id=#{channel}", socket)
      body.should eql("{\"channel\": \"#{channel}\", \"published_messages\": \"1\", \"stored_messages\": \"1\", \"subscribers\": \"0\"}\r\n")
      socket.close
    end
  end
end
require 'spec_helper'

# Measures the approximate shared-memory footprint of messages, channels and
# subscribers by filling the shared memory until nginx stops returning 200,
# then comparing observed counts against per-object size estimates.
describe "Measure Memory" do
  let(:config) do
    {
      :shared_memory_size => "2m",
      :shared_memory_cleanup_objects_ttl => "60m",
      :message_ttl => "60m",
      :max_messages_stored_per_channel => nil,
      :keepalive => "on",
      :header_template => nil,
      :message_template => nil,
      :footer_template => nil,
      :ping_message_interval => nil
    }
  end

  # Empirical per-object size estimates (bytes) used by the assertions below.
  message_estimate_size = 174
  channel_estimate_size = 536
  subscriber_estimate_size = 200
  subscriber_estimate_system_size = 6560

  it "should check message size" do
    channel = 'ch_test_message_size'
    body = '1'

    nginx_run_server(config, :timeout => 5) do |conf|
      shared_size = conf.shared_memory_size.to_i * 1024 * 1024
      post_channel_message = "POST /pub?id=#{channel} HTTP/1.0\r\nContent-Length: #{body.size}\r\n\r\n#{body}"
      socket = open_socket(nginx_host, nginx_port)
      # Publish to one channel until shared memory is exhausted (non-200).
      while (true) do
        socket.print(post_channel_message)
        resp_headers, resp_body = read_response_on_socket(socket, "}\r\n")
        break unless resp_headers.match(/200 OK/)
      end
      socket.close

      EventMachine.run do
        pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get
        pub_2.callback do
          pub_2.response_header.status.should eql(200)
          pub_2.response_header.content_length.should_not eql(0)
          resp = JSON.parse(pub_2.response)
          # Number of stored messages should roughly equal capacity divided by
          # (estimated per-message overhead + payload size).
          expected_message = shared_size / (message_estimate_size + body.size)
          resp["published_messages"].to_i.should be_within(20).of(expected_message)
          EventMachine.stop
        end
      end
    end
  end

  it "should check channel size" do
    body = '1'

    nginx_run_server(config, :timeout => 1500) do |conf|
      shared_size = conf.shared_memory_size.to_i * 1024 * 1024
      socket = open_socket(nginx_host, nginx_port)
      # Create a new channel per publish (numeric ids from 1000 upward, so
      # each id is 4 characters) until shared memory is exhausted.
      channel = 1000
      while (true) do
        post_channel_message = "POST /pub?id=#{channel} HTTP/1.0\r\nContent-Length: #{body.size}\r\n\r\n#{body}"
        socket.print(post_channel_message)
        resp_headers, resp_body = read_response_on_socket(socket, "}\r\n")
        break unless resp_headers.match(/200 OK/)
        channel += 1
      end
      socket.close

      EventMachine.run do
        pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get
        pub_2.callback do
          pub_2.response_header.status.should eql(200)
          pub_2.response_header.content_length.should_not eql(0)
          resp = JSON.parse(pub_2.response)
          # Remaining capacity after messages, divided by estimated channel
          # size plus the 4-byte channel id.
          expected_channel = (shared_size - ((body.size + message_estimate_size) * resp["published_messages"].to_i)) / (channel_estimate_size + 4) # 4 channel id size
          resp["channels"].to_i.should be_within(10).of(expected_channel)
          EventMachine.stop
        end
      end
    end
  end

  it "should check subscriber size" do
    # Small shared memory so subscriber slots run out quickly.
    nginx_run_server(config.merge({:shared_memory_size => "300k", :header_template => "H"}), :timeout => 5) do |conf|
      shared_size = conf.shared_memory_size.to_i * 1024 #shm size is in kbytes for this test
      EventMachine.run do
        # Keep adding subscribers until memory is exhausted, then verify count.
        subscriber_in_loop(1000, headers) do
          pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers
          pub_2.callback do
            pub_2.response_header.status.should eql(200)
            pub_2.response_header.content_length.should_not eql(0)
            resp = JSON.parse(pub_2.response)
            expected_subscriber = (shared_size - ((channel_estimate_size + 4) * resp["channels"].to_i)) / subscriber_estimate_size # 4 channel id size
            resp["subscribers"].to_i.should be_within(10).of(expected_subscriber)
            EventMachine.stop
          end
        end
      end
    end
  end

  it "should check subscriber system size" do
    channel = 'ch_test_subscriber_system_size'
    body = '1'

    nginx_run_server(config.merge({:header_template => "H"}), :timeout => 15) do |conf|
      #warming up
      EventMachine.run do
        sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_i.to_s).get :head => headers, :body => body
        sub.stream do |chunk|
          EventMachine.stop
        end
      end

      per_subscriber = 0
      EventMachine.run do
        # Sample nginx worker RSS before and after adding 500 subscribers
        # (ids 1000..1499) and extrapolate the per-subscriber system memory.
        memory_1 = `ps -eo rss,cmd | grep -E 'ngin[xX] -c '`.split(' ')[0].to_i
        subscriber_in_loop_with_limit(channel, headers, body, 1000, 1499) do
          sleep(1)
          memory_2 = `ps -eo rss,cmd | grep -E 'ngin[xX] -c '`.split(' ')[0].to_i
          per_subscriber = ((memory_2 - memory_1).to_f / 500) * 1000
          EventMachine.stop
        end
      end
      per_subscriber.should be_within(10).of(subscriber_estimate_system_size)
    end
  end
end
# Recursively opens subscribers on sequential numeric channel ids until nginx
# refuses the connection (out of shared memory), then invokes +block+.
# Each successful stream chunk triggers a subscriber on channel + 1; the
# callback fires when a request completes (i.e. is no longer streaming),
# which signals that the server stopped accepting subscribers.
def subscriber_in_loop(channel, headers, &block)
  sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_i.to_s).get :head => headers
  sub.stream do |chunk|
    # Still accepting subscribers: recurse with the next channel id.
    subscriber_in_loop(channel.to_i + 1, headers) do
      yield block
    end
  end
  sub.callback do
    # Request finished instead of staying open -> stop recursing.
    block.call
  end
end
# Like subscriber_in_loop, but opens subscribers on the SAME channel and
# stops after a fixed count: recursion runs from +start+ up to +limit+,
# calling +block+ (and stopping EventMachine) once the limit is reached.
# The callback also fires +block+ if a request completes early.
def subscriber_in_loop_with_limit(channel, headers, body, start, limit, &block)
  sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_i.to_s).get :head => headers, :body => body
  sub.stream do |chunk|
    if start == limit
      block.call
      EventMachine.stop
    end
    subscriber_in_loop_with_limit(channel, headers, body, start + 1, limit) do
      yield block
    end
  end
  sub.callback do
    block.call
  end
end
require 'spec_helper'

# Acceptance tests for signal handling: reload via HUP must hand channels,
# messages and subscribers over to the new worker, and shared-memory size
# changes must be ignored on reload.
describe "Send Signals" do
  workers = 1
  old_cld_trap = nil

  before do
    # Force a single worker so before/after reload worker pids are
    # deterministic; ignore child-death signals while nginx restarts workers.
    workers = ENV['NGINX_WORKERS']
    ENV['NGINX_WORKERS'] = '1'
    old_cld_trap = Signal.trap("CLD", "IGNORE")
  end

  after do
    # Restore the original environment and signal handler.
    ENV['NGINX_WORKERS'] = workers
    Signal.trap("CLD", old_cld_trap)
  end

  let(:config) do
    {
      :master_process => 'on',
      :daemon => 'on',
      :header_template => 'HEADER',
      :shared_memory_cleanup_objects_ttl => '40s',
      :message_ttl => '60s',
      :subscriber_connection_ttl => '65s'
    }
  end

  it "should reload normaly when receives HUP signal" do
    channel = 'ch_test_send_hup_signal'
    body = 'body'
    response = response2 = ''
    pid = pid2 = 0

    nginx_run_server(config, :timeout => 60) do |conf|
      EventMachine.run do
        # create subscriber
        sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers, :timeout => 30
        sub_1.stream do |chunk|
          response = response + chunk
          if response.strip == conf.header_template
            # check statistics
            pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :timeout => 30
            pub_1.callback do
              pub_1.response_header.status.should eql(200)
              pub_1.response_header.content_length.should_not eql(0)
              resp_1 = JSON.parse(pub_1.response)
              resp_1.has_key?("channels").should be_true
              resp_1["channels"].to_i.should eql(1)
              resp_1["by_worker"].count.should eql(1)
              # Remember the pre-reload worker pid.
              pid = resp_1["by_worker"][0]['pid'].to_i

              # send reload signal
              `#{ nginx_executable } -c #{ conf.configuration_filename } -s reload > /dev/null 2>&1`
            end
          end
        end

        conectted_after_reloaded = false
        i = 0
        # Poll statistics every 0.5s: first waiting for two workers (old +
        # new, during reload), then for the old worker to die.
        EM.add_periodic_timer(0.5) do
          # check statistics again
          pub_4 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :timeout => 30
          pub_4.callback do
            resp_3 = JSON.parse(pub_4.response)
            resp_3.has_key?("by_worker").should be_true

            if resp_3["by_worker"].count == 2 && !conectted_after_reloaded
              conectted_after_reloaded = true

              # publish a message
              pub_2 = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s).post :head => headers, :body => body, :timeout => 30
              pub_2.callback do
                # add new subscriber
                sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 30
                sub_2.stream do |chunk|
                  response2 = response2 + chunk
                  if response2.strip == conf.header_template
                    # check statistics again
                    pub_3 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :timeout => 30
                    pub_3.callback do
                      # During reload both workers are alive and both
                      # subscribers are counted.
                      resp_2 = JSON.parse(pub_3.response)
                      resp_2.has_key?("channels").should be_true
                      resp_2["channels"].to_i.should eql(1)
                      resp_2["published_messages"].to_i.should eql(1)
                      resp_2["subscribers"].to_i.should eql(2)
                      resp_2["by_worker"].count.should eql(2)
                    end
                  end
                end
              end
            end

            if resp_3["by_worker"].count == 1 && conectted_after_reloaded
              # Old worker is gone: state survived the reload and the new
              # worker has a different pid.
              resp_3["channels"].to_i.should eql(1)
              resp_3["published_messages"].to_i.should eql(1)
              resp_3["subscribers"].to_i.should eql(1)
              resp_3["by_worker"].count.should eql(1)
              pid2 = resp_3["by_worker"][0]['pid'].to_i
              pid.should_not eql(pid2)
              EventMachine.stop
            end

            i = i + 1
            if i == 120
              # 120 polls * 0.5s = 60s timeout for the old worker to exit.
              fail("Worker didn't die in 60 seconds")
              EventMachine.stop
            end
          end
        end
      end
    end
  end

  it "should ignore changes on shared memory size when doing a reload" do
    channel = 'ch_test_reload_with_different_shared_memory_size'
    body = 'body'
    response = response2 = ''
    pid = pid2 = 0

    nginx_run_server(config, :timeout => 10) do |conf|
      EventMachine.run do
        publish_message_inline(channel, {}, body)

        # check statistics
        pub_1 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :timeout => 30
        pub_1.callback do
          pub_1.response_header.status.should eql(200)
          pub_1.response_header.content_length.should_not eql(0)
          resp_1 = JSON.parse(pub_1.response)
          resp_1.has_key?("channels").should be_true
          resp_1["channels"].to_i.should eql(1)
          resp_1["published_messages"].to_i.should eql(1)

          # Rewrite the config with a different shared memory size, then
          # reload; the change must be ignored (requires full restart).
          conf.configuration[:shared_memory_size] = '20m'
          conf.create_configuration_file

          # send reload signal
          `#{ nginx_executable } -c #{ conf.configuration_filename } -s reload > /dev/null 2>&1`

          sleep 5

          pub_2 = EventMachine::HttpRequest.new(nginx_address + '/channels-stats').get :head => headers, :timeout => 30
          pub_2.callback do
            # State unchanged after reload, and the error log documents that
            # the memory size change was ignored.
            pub_2.response_header.status.should eql(200)
            pub_2.response_header.content_length.should_not eql(0)
            resp_2 = JSON.parse(pub_2.response)
            resp_2.has_key?("channels").should be_true
            resp_2["channels"].to_i.should eql(1)
            resp_2["published_messages"].to_i.should eql(1)

            error_log = File.read(conf.error_log)
            error_log.should include("Cannot change memory area size without restart, ignoring change")
            EventMachine.stop
          end
        end
      end
    end
  end
end
require 'spec_helper'
describe "Setup Parameters" do
it "should not accept '0' as ping message interval" do
nginx_test_configuration({:ping_message_interval => 0}).should include("push_stream_ping_message_interval cannot be zero")
end
it "should not accept a blank message template" do
nginx_test_configuration({:message_template => ""}).should include("push_stream_message_template cannot be blank")
end
it "should not accept '0' as subscriber connection ttl" do
nginx_test_configuration({:subscriber_connection_ttl => 0}).should include("push_stream_subscriber_connection_ttl cannot be zero")
end
it "should not accept '0' as long polling subscriber connection ttl" do
nginx_test_configuration({:longpolling_connection_ttl => 0}).should include("push_stream_longpolling_connection_ttl cannot be zero")
end
it "should not accept '0' as max channel id length" do
nginx_test_configuration({:max_channel_id_length => 0}).should include("push_stream_max_channel_id_length cannot be zero")
end
it "should not accept '0' as message ttl" do
nginx_test_configuration({:message_ttl => 0}).should include("push_stream_message_ttl cannot be zero")
end
it "should not accept '0' as max subscribers per channel" do
nginx_test_configuration({:max_subscribers_per_channel => 0}).should include("push_stream_max_subscribers_per_channel cannot be zero")
end
it "should not accept '0' as max messages stored per channel" do
nginx_test_configuration({:max_messages_stored_per_channel => 0}).should include("push_stream_max_messages_stored_per_channel cannot be zero")
end
it "should not accept '0' as max number of channels" do
nginx_test_configuration({:max_number_of_channels => 0}).should include("push_stream_max_number_of_channels cannot be zero")
end
it "should not accept '0' as max number of broadcast channels" do
nginx_test_configuration({:max_number_of_broadcast_channels => 0}).should include("push_stream_max_number_of_broadcast_channels cannot be zero")
end
it "should not accept '0' as max broadcast channels" do
nginx_test_configuration({:broadcast_channel_max_qtd => 0}).should include("push_stream_broadcast_channel_max_qtd cannot be zero")
end
it "should not set max broadcast channels without set boadcast channel prefix" do
nginx_test_configuration({:broadcast_channel_max_qtd => 1, :broadcast_channel_prefix => ""}).should include("cannot set broadcast channel max qtd if push_stream_broadcast_channel_prefix is not set or blank")
end
it "should not accept '0' as max number of broadcast channels" do
config = {:max_number_of_broadcast_channels => 3, :broadcast_channel_max_qtd => 4, :broadcast_channel_prefix => "broad_"}
nginx_test_configuration(config).should include("max number of broadcast channels cannot be smaller than value in push_stream_broadcast_channel_max_qtd")
end
it "should not accept a value less than '30s' as shared memory cleanup objects ttl" do
nginx_test_configuration({:shared_memory_cleanup_objects_ttl => '15s'}).should include("memory cleanup objects ttl cannot't be less than 30.")
end
# A configuration without an http block is still valid: the module just warns
# it will not be used. The custom :configuration_template replaces the default
# template from NginxConfiguration for this single example.
it "should accept a configuration without http block" do
config = {
:configuration_template => %q{
pid <%= pid_file %>;
error_log <%= error_log %> debug;
# Development Mode
master_process off;
daemon off;
worker_processes <%= nginx_workers %>;
events {
worker_connections 1024;
use <%= (RUBY_PLATFORM =~ /darwin/) ? 'kqueue' : 'epoll' %>;
}
}
}
nginx_test_configuration(config).should include("ngx_http_push_stream_module will not be used with this configuration.")
end
# push_stream_subscriber / push_stream_publisher accept a fixed set of modes.
it "should not accept an invalid push mode" do
  nginx_test_configuration({:subscriber_mode => "unknown"}).should include("invalid push_stream_subscriber mode value: unknown, accepted values (streaming, polling, long-polling)")
end
it "should accept the known push modes" do
  ["", "streaming", "polling", "long-polling"].each do |mode|
    nginx_test_configuration({:subscriber_mode => mode}).should_not include("invalid push_stream_subscriber mode value")
  end
end
it "should not accept an invalid publisher mode" do
  nginx_test_configuration({:publisher_mode => "unknown"}).should include("invalid push_stream_publisher mode value: unknown, accepted values (normal, admin)")
end
it "should accept the known publisher modes" do
  ["", "normal", "admin"].each do |mode|
    nginx_test_configuration({:publisher_mode => mode}).should_not include("invalid push_stream_publisher mode value")
  end
end
# push_stream_eventsource_support is only valid on subscriber locations;
# enabling it together with any other location directive must be rejected.
{
  "publisher" => "push_stream_publisher;",
  "statistics" => "push_stream_channels_statistics;",
  "websocket" => "push_stream_websocket;"
}.each do |location_kind, directive|
  it "should not accept enable event source on #{location_kind} location" do
    config = {
      :extra_location => %(
        location ~ /test/ {
          #{directive}
          push_stream_eventsource_support on;
        }
      )
    }
    nginx_test_configuration(config).should include("event source support is only available on subscriber location")
  end
end
it "should not accept an invalid pattern for padding by user agent" do
# expected entry format is "agent_regexp,header_padding,message_padding";
# multiple entries are joined with ':'
nginx_test_configuration({:padding_by_user_agent => "user_agent,as,df"}).should include("padding pattern not match the value user_agent,as,df")
nginx_test_configuration({:padding_by_user_agent => "user_agent;10;0"}).should include("padding pattern not match the value user_agent;10;0")
# a valid first entry followed by invalid ones reports the remaining text
nginx_test_configuration({:padding_by_user_agent => "user_agent,10,0:other_user_agent;20;0:another_user_agent,30,0"}).should include("error applying padding pattern to other_user_agent;20;0:another_user_agent,30,0")
end
end
# Nginx configuration shared by every spec through the nginx_test_helper gem:
# default_configuration supplies the directive values (each example may merge
# overrides) and template_configuration the ERB template rendered into the
# nginx.conf used for that example.
module NginxConfiguration
# Default value for every directive exercised by the suite. A nil value makes
# write_directive (from nginx_test_helper -- TODO confirm) omit the directive,
# so nginx falls back to the module's built-in default.
def self.default_configuration
{
:disable_start_stop_server => false,
:master_process => 'off',
:daemon => 'off',
:content_type => 'text/html; charset=utf-8',
:keepalive => 'off',
:ping_message_interval => '10s',
# iframe-streaming header; escapes (\\" \\r\\n \\') survive ERB rendering so the
# generated nginx.conf receives properly quoted template text -- do not reformat
:header_template => %{<html><head><meta http-equiv=\\"Content-Type\\" content=\\"text/html; charset=utf-8\\">\\r\\n<meta http-equiv=\\"Cache-Control\\" content=\\"no-store\\">\\r\\n<meta http-equiv=\\"Cache-Control\\" content=\\"no-cache\\">\\r\\n<meta http-equiv=\\"Expires\\" content=\\"Thu, 1 Jan 1970 00:00:00 GMT\\">\\r\\n<script type=\\"text/javascript\\">\\r\\nwindow.onError = null;\\r\\ndocument.domain = \\'<%= nginx_host %>\\';\\r\\nparent.PushStream.register(this);\\r\\n</script>\\r\\n</head>\\r\\n<body onload=\\"try { parent.PushStream.reset(this) } catch (e) {}\\">},
# ~id~/~channel~/~text~ placeholders are replaced by the module per message
:message_template => "<script>p(~id~,'~channel~','~text~');</script>",
:footer_template => "</body></html>",
:store_messages => 'on',
:subscriber_connection_ttl => nil,
:longpolling_connection_ttl => nil,
:message_ttl => '50m',
:max_channel_id_length => 200,
:max_subscribers_per_channel => nil,
:max_messages_stored_per_channel => 20,
:max_number_of_channels => nil,
:max_number_of_broadcast_channels => nil,
:broadcast_channel_max_qtd => 3,
:broadcast_channel_prefix => 'broad_',
:shared_memory_cleanup_objects_ttl => '5m',
:subscriber_mode => nil,
:publisher_mode => nil,
:padding_by_user_agent => nil,
:shared_memory_size => '10m',
:channel_deleted_message_text => nil,
:ping_message_text => nil,
:last_received_message_time => nil,
:last_received_message_tag => nil,
:user_agent => nil,
:authorized_channels_only => 'off',
:allowed_origins => nil,
:eventsource_support => 'off',
:client_max_body_size => '32k',
:client_body_buffer_size => '32k',
# raw nginx config appended inside the server block by some examples
:extra_location => ''
}
end
# ERB template for the generated nginx.conf: exposes one statistics location
# (/channels-stats), one publisher (/pub) and one subscriber (/sub/...) plus the
# optional extra_location. The template is a single %() string literal; nothing
# may be inserted inside it without changing the rendered configuration.
def self.template_configuration
%(
pid <%= pid_file %>;
error_log <%= error_log %> debug;
# Development Mode
master_process <%= master_process %>;
daemon <%= daemon %>;
worker_processes <%= nginx_workers %>;
events {
worker_connections 1024;
use <%= (RUBY_PLATFORM =~ /darwin/) ? 'kqueue' : 'epoll' %>;
}
http {
default_type application/octet-stream;
access_log <%= access_log %>;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 100;
send_timeout 10;
client_body_timeout 10;
client_header_timeout 10;
sendfile on;
client_header_buffer_size 1k;
large_client_header_buffers 2 4k;
client_max_body_size 1k;
client_body_buffer_size 1k;
ignore_invalid_headers on;
client_body_in_single_buffer on;
client_body_temp_path <%= client_body_temp %>;
<%= write_directive("push_stream_ping_message_interval", ping_message_interval, "ping frequency") %>
<%= write_directive("push_stream_message_template", message_template, "message template") %>
<%= write_directive("push_stream_subscriber_connection_ttl", subscriber_connection_ttl, "timeout for subscriber connections") %>
<%= write_directive("push_stream_longpolling_connection_ttl", longpolling_connection_ttl, "timeout for long polling connections") %>
<%= write_directive("push_stream_header_template", header_template, "header to be sent when receiving new subscriber connection") %>
<%= write_directive("push_stream_message_ttl", message_ttl, "message ttl") %>
<%= write_directive("push_stream_footer_template", footer_template, "footer to be sent when finishing subscriber connection") %>
<%= write_directive("push_stream_max_channel_id_length", max_channel_id_length) %>
<%= write_directive("push_stream_max_subscribers_per_channel", max_subscribers_per_channel, "max subscribers per channel") %>
<%= write_directive("push_stream_max_messages_stored_per_channel", max_messages_stored_per_channel, "max messages to store in memory") %>
<%= write_directive("push_stream_max_number_of_channels", max_number_of_channels) %>
<%= write_directive("push_stream_max_number_of_broadcast_channels", max_number_of_broadcast_channels) %>
<%= write_directive("push_stream_broadcast_channel_max_qtd", broadcast_channel_max_qtd) %>
<%= write_directive("push_stream_broadcast_channel_prefix", broadcast_channel_prefix) %>
<%= write_directive("push_stream_shared_memory_cleanup_objects_ttl", shared_memory_cleanup_objects_ttl) %>
<%= write_directive("push_stream_padding_by_user_agent", padding_by_user_agent) %>
<%= write_directive("push_stream_authorized_channels_only", authorized_channels_only, "subscriber may create channels on demand or only authorized (publisher) may do it?") %>
<%= write_directive("push_stream_shared_memory_size", shared_memory_size) %>
<%= write_directive("push_stream_user_agent", user_agent) %>
<%= write_directive("push_stream_allowed_origins", allowed_origins) %>
<%= write_directive("push_stream_last_received_message_time", last_received_message_time) %>
<%= write_directive("push_stream_last_received_message_tag", last_received_message_tag) %>
<%= write_directive("push_stream_channel_deleted_message_text", channel_deleted_message_text) %>
<%= write_directive("push_stream_ping_message_text", ping_message_text) %>
server {
listen <%= nginx_port %>;
server_name <%= nginx_host %>;
location /channels-stats {
# activate channels statistics mode for this location
push_stream_channels_statistics;
# query string based channel id
set $push_stream_channel_id $arg_id;
<%= write_directive("push_stream_keepalive", keepalive, "keepalive") %>
}
location /pub {
# activate publisher mode for this location
push_stream_publisher <%= publisher_mode unless publisher_mode.nil? || publisher_mode == "normal" %>;
# query string based channel id
set $push_stream_channel_id $arg_id;
<%= write_directive("push_stream_store_messages", store_messages, "store messages") %>
<%= write_directive("push_stream_keepalive", keepalive, "keepalive") %>
# client_max_body_size MUST be equal to client_body_buffer_size or
# you will be sorry.
client_max_body_size <%= client_max_body_size %>;
client_body_buffer_size <%= client_body_buffer_size %>;
}
location ~ /sub/(.*)? {
# activate subscriber mode for this location
push_stream_subscriber <%= subscriber_mode unless subscriber_mode.nil? || subscriber_mode == "streaming" %>;
<%= write_directive("push_stream_eventsource_support", eventsource_support, "activate event source support for this location") %>
# positional channel path
set $push_stream_channels_path $1;
<%= write_directive("push_stream_content_type", content_type, "content-type") %>
<%= write_directive("push_stream_keepalive", keepalive, "keepalive") %>
}
<%= extra_location %>
}
}
)
end
end
require 'spec_helper'

# Channel ids picked to collide in the module's channel storage (per the suite
# name -- presumably hash collisions; TODO confirm against the C source).
# Creating each channel and then querying its statistics must still succeed.
describe "Publisher Channel id collision" do
  it "should create and retrieve channels with ids that collide" do
    colliding_ids = %w[A plumless buckeroo B fc0591 123rainerbommert C a1sellers advertees D]

    nginx_run_server do |conf|
      # create every channel through the publisher location
      colliding_ids.each do |channel_id|
        EventMachine.run do
          req = EventMachine::HttpRequest.new("#{nginx_address}/pub?id=#{channel_id}").post :body => 'x', :timeout => 30
          req.callback do
            req.response_header.status.should eql(200)
            EventMachine.stop
          end
        end
      end

      # every channel must be found individually in the statistics location
      colliding_ids.each do |channel_id|
        EventMachine.run do
          req = EventMachine::HttpRequest.new("#{nginx_address}/channels-stats?id=#{channel_id}").get :timeout => 30
          req.callback do
            req.response_header.status.should eql(200)
            EventMachine.stop
          end
        end
      end
    end
  end
end
This diff is collapsed.
This diff is collapsed.
require 'rubygems'
# Set up gems listed in the Gemfile.
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
# File.exist? replaces the deprecated File.exists? alias (removed in Ruby 3.2).
require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
Bundler.require(:default, :test) if defined?(Bundler)
require 'nginx_configuration'
RSpec.configure do |config|
# Delete the generated nginx conf/log files only for passing examples so the
# artifacts of a failing run can be inspected. config_id and has_passed? are
# provided by nginx_test_helper -- TODO confirm.
config.after(:each) do
NginxTestHelper::Config.delete_config_and_log_files(config_id) if has_passed?
end
config.order = "random"
end
# POSTs +body+ to the publisher location of +channel+ and invokes the given
# block once the request succeeds; fails the current example on a non-200
# response. Returns the request so callers may attach further callbacks.
def publish_message_inline(channel, headers, body, &block)
  req = EventMachine::HttpRequest.new("#{nginx_address}/pub?id=#{channel}").post :head => headers, :body => body, :timeout => 30
  req.callback do
    fail("Request was not accepted") if req.response_header.status != 200
    block.call if block
  end
  req
end
# Publishes +body+ to +channel+ inside its own reactor run and asserts the
# publisher echoes the channel name back in its JSON response body.
def publish_message(channel, headers, body)
  EventMachine.run do
    req = publish_message_inline(channel, headers, body) do
      # +req+ is captured by the block; the callback fires asynchronously,
      # after the assignment completed, so the self-reference is safe.
      req.response_header.content_length.should_not eql(0)
      parsed = JSON.parse(req.response)
      parsed["channel"].to_s.should eql(channel)
      EventMachine.stop
    end
  end
end
# Opens a streaming subscriber connection to +channel+ (creating the channel on
# demand) and calls the block for every received chunk; the reactor stops when
# the connection finishes.
def create_channel_by_subscribe(channel, headers, timeout=60, &block)
  EventMachine.run do
    subscriber = EventMachine::HttpRequest.new("#{nginx_address}/sub/#{channel}").get :head => headers, :timeout => timeout
    subscriber.stream { |chunk| block.call }
    subscriber.callback { EventMachine.stop }
  end
end
# Like publish_message_inline, but dispatches to the caller-supplied
# callbacks[:success] (status 200) or callbacks[:error] (anything else),
# passing (status, response_body). Returns the request object.
def publish_message_inline_with_callbacks(channel, headers, body, callbacks = {})
  req = EventMachine::HttpRequest.new("#{nginx_address}/pub?id=#{channel}").post :head => headers, :body => body, :timeout => 30
  req.callback do
    status = req.response_header.status
    handler = (status == 200) ? callbacks[:success] : callbacks[:error]
    handler.call(status, req.response) if handler
  end
  req
end
require 'spec_helper'

# Description typos fixed: "Comunication" -> "Communication", "keyworkds" ->
# "keywords", plus grammar in two example names. Assertions are unchanged.
describe "Communication Properties" do
  let(:config) do
    {
      :authorized_channels_only => "off",
      :header_template => "connected",
      :message_ttl => "12s",
      :message_template => "~text~",
      :ping_message_interval => "1s"
    }
  end

  it "should not block connection to a nonexistent channel" do
    channel = 'ch_test_all_authorized'

    nginx_run_server(config, :timeout => 5) do |conf|
      EventMachine.run do
        sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers, :timeout => 60
        sub.stream do |chunk|
          # receiving the header template proves the channel was created on demand
          chunk.should eql("#{conf.header_template}\r\n")
          EventMachine.stop
        end
      end
    end
  end

  it "should block connection to a nonexistent channel when authorized only is 'on'" do
    channel = 'ch_test_only_authorized'
    body = 'message to create a channel'

    nginx_run_server(config.merge(:authorized_channels_only => "on"), :timeout => 5) do |conf|
      EventMachine.run do
        # subscribers may not create channels...
        sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers, :timeout => 60
        sub_1.callback do |chunk|
          sub_1.response_header.status.should eql(403)
          sub_1.response_header.content_length.should eql(0)
          sub_1.response_header['X_NGINX_PUSHSTREAM_EXPLAIN'].should eql("Subscriber could not create channels.")
          # ...but once a publisher creates the channel, subscribing succeeds
          pub = EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s ).post :head => headers, :body => body, :timeout => 30
          pub.callback do
            sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers, :timeout => 60
            sub_2.stream do |chunk2|
              chunk2.should eql("#{conf.header_template}\r\n")
              EventMachine.stop
            end
          end
        end
      end
    end
  end

  it "should discard messages published longer ago than the value configured as message ttl" do
    channel = 'ch_test_message_ttl'
    body = 'message to test buffer timeout '
    response_1 = response_2 = response_3 = ""
    sub_1 = sub_2 = sub_3 = nil

    nginx_run_server(config, :timeout => 20) do |conf|
      EventMachine.run do
        # publish one message, then probe the backlog (".b1") at 2s, 6s and 16s;
        # the unused local assignments (pub/time_2) were removed
        EventMachine::HttpRequest.new(nginx_address + '/pub?id=' + channel.to_s ).post :head => headers, :body => body, :timeout => 30

        EM.add_timer(2) do
          sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 60
          sub_1.stream do |chunk|
            response_1 += chunk unless response_1.include?(body)
            sub_1.close if response_1.include?(body)
          end
        end

        EM.add_timer(6) do
          sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 60
          sub_2.stream do |chunk|
            response_2 += chunk unless response_2.include?(body)
            sub_2.close if response_2.include?(body)
          end
        end

        # message will be certainly expired at 15 seconds
        EM.add_timer(16) do
          sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 60
          sub_3.stream do |chunk|
            response_3 += chunk unless response_3.include?(body)
            sub_3.close if response_3.include?(body)
          end
        end

        EM.add_timer(17) do
          response_1.should eql("#{conf.header_template}\r\n#{body}\r\n")
          response_2.should eql("#{conf.header_template}\r\n#{body}\r\n")
          response_3.should eql("#{conf.header_template}\r\n")
          EventMachine.stop
        end
      end
    end
  end

  it "should apply the message template to published message with the available keywords" do
    channel = 'ch_test_message_template'
    body = 'message to create a channel'
    response = ""

    nginx_run_server(config.merge(:message_template => '{\"duplicated\":\"~channel~\", \"channel\":\"~channel~\", \"message\":\"~text~\", \"message_id\":\"~id~\"}'), :timeout => 5) do |conf|
      publish_message(channel, headers, body)
      EventMachine.run do
        sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 60
        sub.stream do |chunk|
          response += chunk
          lines = response.split("\r\n")
          if lines.length >= 3
            # header, the templated message, then the sentinel with empty keywords
            lines[0].should eql("#{conf.header_template}")
            lines[1].should eql("{\"duplicated\":\"#{channel}\", \"channel\":\"#{channel}\", \"message\":\"#{body}\", \"message_id\":\"1\"}")
            lines[2].should eql("{\"duplicated\":\"\", \"channel\":\"\", \"message\":\"\", \"message_id\":\"-1\"}")
            EventMachine.stop
          end
        end
      end
    end
  end

  it "should not be in loop when channel or published message contains one of the keywords" do
    channel = 'ch_test_message_and_channel_with_same_pattern_of_the_template~channel~~channel~~channel~~text~~text~~text~'
    body = '~channel~~channel~~channel~~text~~text~~text~'
    response = ""

    nginx_run_server(config.merge(:message_template => '{\"channel\":\"~channel~\", \"message\":\"~text~\", \"message_id\":\"~id~\"}'), :timeout => 5) do |conf|
      publish_message(channel, headers, body)
      EventMachine.run do
        sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '.b1').get :head => headers, :timeout => 60
        sub.stream do |chunk|
          response += chunk
          lines = response.split("\r\n")
          if lines.length >= 3
            lines[0].should eql("#{conf.header_template}")
            # keyword substitution must happen exactly once and never recurse
            lines[1].should eql("{\"channel\":\"ch_test_message_and_channel_with_same_pattern_of_the_template~channel~~channel~~channel~~channel~~channel~~channel~~text~~text~~text~~channel~~channel~~channel~~text~~text~~text~~channel~~channel~~channel~~text~~text~~text~\", \"message\":\"~channel~~channel~~channel~~text~~text~~text~\", \"message_id\":\"1\"}")
            lines[2].should eql("{\"channel\":\"\", \"message\":\"\", \"message_id\":\"-1\"}")
            EventMachine.stop
          end
        end
      end
    end
  end
end
require 'spec_helper'
# Verifies push_stream_subscriber_connection_ttl: subscribers must be closed by
# the server when their ttl elapses, measured from each individual connect.
describe "Subscriber Connection Cleanup" do
let(:config) do
{
:subscriber_connection_ttl => '17s',
:header_template => 'HEADER_TEMPLATE',
:footer_template => 'FOOTER_TEMPLATE',
:ping_message_interval => '3s'
}
end
it "should disconnect the subscriber after the configured connection ttl be reached" do
channel = 'ch_test_subscriber_connection_timeout'
nginx_run_server(config.merge(:ping_message_interval => nil), :timeout => 25) do |conf|
start = Time.now
response = ''
EventMachine.run do
# inactivity_timeout 20 > ttl 17, so only the server may close the connection
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s, :inactivity_timeout => 20).get :head => headers
sub.stream do |chunk|
response += chunk
response.should include(conf.header_template)
end
sub.callback do
stop = Time.now
# server must close between 17s (ttl) and 17.5s
time_diff_sec(start, stop).should be_in_the_interval(17, 17.5)
response.should include(conf.footer_template)
EventMachine.stop
end
end
end
end
it "should disconnect the subscriber after the configured connection ttl be reached with ping message" do
channel = 'ch_test_subscriber_connection_timeout_with_ping_message'
nginx_run_server(config.merge(:header_template => nil, :footer_template => nil), :timeout => 25) do |conf|
start = Time.now
chunks_received = 0
EventMachine.run do
sub = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub.stream do |chunk|
chunks_received += 1
end
sub.callback do
stop = Time.now
time_diff_sec(start, stop).should be_in_the_interval(17, 17.5)
# with a 3s ping interval over a 17s lifetime, exactly 5 pings are expected
chunks_received.should be_eql(5)
EventMachine.stop
end
end
end
end
it "should disconnect each subscriber after the configured connection ttl be reached starting when it connects" do
channel = 'ch_test_multiple_subscribers_connection_timeout'
nginx_run_server(config.merge(:subscriber_connection_ttl => '5s', :ping_message_interval => nil), :timeout => 25) do |conf|
EventMachine.run do
response_1 = ''
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_1.stream do |chunk|
response_1 += chunk
response_1.should include(conf.header_template)
end
sub_1.callback do
response_1.should include(conf.footer_template)
end
# NOTE(review): sleep blocks the reactor thread; it is used here to stagger
# the connect times of the subscribers -- keep as-is, EM timers would change
# the observed sequencing of this example.
sleep(2)
response_2 = ''
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_2.stream do |chunk|
response_2 += chunk
response_2.should include(conf.header_template)
end
sub_2.callback do
response_2.should include(conf.footer_template)
# sub_4 connects only after sub_2's ttl fired; its own completion ends the example
response_4 = ''
sub_4 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_4.stream do |chunk|
response_4 += chunk
response_4.should include(conf.header_template)
end
sub_4.callback do
response_4.should include(conf.footer_template)
EventMachine.stop
end
end
sleep(6)
response_3 = ''
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers
sub_3.stream do |chunk|
response_3 += chunk
response_3.should include(conf.header_template)
end
sub_3.callback do
response_3.should include(conf.footer_template)
end
end
end
end
end
This diff is collapsed.
This diff is collapsed.
require 'spec_helper'
# Verifies push_stream_padding_by_user_agent. Each entry has the format
# "agent_regexp,header_padding,message_padding"; entries are joined with ':'.
describe "Subscriber Padding by user agent" do
let(:config) do
{
:padding_by_user_agent => "[T|t]est 1,1024,508:[T|t]est 2,4097,0",
:user_agent => nil,
:subscriber_connection_ttl => '1s',
:header_template => nil,
:message_template => nil,
:footer_template => nil
}
end
it "should apply a padding to the header" do
channel = 'ch_test_header_padding'
nginx_run_server(config.merge(:header_template => "0123456789"), :timeout => 5) do |conf|
EventMachine.run do
# NOTE(review): the "+ 4" / "+ 2" offsets presumably account for the "\r\n"
# terminators appended to padding and template -- confirm against the module.
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(1100 + conf.header_template.size + 4)
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 2"), :timeout => 30
sub_2.callback do
sub_2.response_header.status.should eql(200)
sub_2.response.size.should eql(4097 + conf.header_template.size + 4)
# an agent matching no pattern gets no padding at all
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 3"), :timeout => 30
sub_3.callback do
sub_3.response_header.status.should eql(200)
sub_3.response.size.should eql(conf.header_template.size + 2)
EventMachine.stop
end
end
end
end
end
end
it "should apply a padding to the message" do
channel = 'ch_test_message_padding'
body = "0123456789"
nginx_run_server(config, :timeout => 5) do |conf|
EventMachine.run do
# each publish happens after the previous subscriber checked its response,
# so every subscriber receives exactly one message before its 1s ttl closes it
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback {
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(500 + body.size + 4)
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 2"), :timeout => 30
sub_2.callback {
sub_2.response_header.status.should eql(200)
sub_2.response.size.should eql(body.size + 2)
sub_3 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 3"), :timeout => 30
sub_3.callback {
sub_3.response_header.status.should eql(200)
sub_3.response.size.should eql(body.size + 2)
EventMachine.stop
}
publish_message_inline(channel, headers, body)
}
publish_message_inline(channel, headers, body)
}
publish_message_inline(channel, headers, body)
end
end
end
it "should apply a padding to the message with different sizes" do
channel = 'ch_test_message_padding_with_different_sizes'
nginx_run_server(config.merge(:padding_by_user_agent => "[T|t]est 1,0,545"), :timeout => 10) do |conf|
EventMachine.run do
# message padding shrinks in 100-byte steps as the message grows, until the
# message size exceeds the configured padding and none is applied
i = 1
expected_padding = 545
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 105
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 221
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 331
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 435
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 502
expected_padding = 600 - ((i/100).to_i * 100)
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(expected_padding + i + 4)
i = 550
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s).get :head => headers.merge("User-Agent" => "Test 1"), :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(i + 2)
EventMachine.stop
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
publish_message_inline(channel, headers, "_" * i)
end
end
end
it "should accept the user agent set by a complex value" do
channel = 'ch_test_user_agent_by_complex_value'
# push_stream_user_agent may read the agent from a variable ($arg_ua here)
# instead of the User-Agent header
nginx_run_server(config.merge(:padding_by_user_agent => "[T|t]est 1,1024,512", :user_agent => "$arg_ua", :header_template => "0123456789"), :timeout => 10) do |conf|
EventMachine.run do
sub_1 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?ua=test 1').get :head => headers, :timeout => 30
sub_1.callback do
sub_1.response_header.status.should eql(200)
sub_1.response.size.should eql(1024 + conf.header_template.size + 4)
sub_2 = EventMachine::HttpRequest.new(nginx_address + '/sub/' + channel.to_s + '?ua=test 2').get :head => headers, :timeout => 30
sub_2.callback do
sub_2.response_header.status.should eql(200)
sub_2.response.size.should eql(conf.header_template.size + 2)
EventMachine.stop
end
end
end
end
end
end
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
rvm ruby-1.8.7-p302@nginx-push-stream-module --create
# Gemfile for the acceptance-test and documentation tooling.
# NOTE(review): consider switching the source to https when the supported
# bundler/ruby versions allow it.
source 'http://rubygems.org'
gem 'rake'
# gems needed to run the ruby/javascript test suites
group :test do
gem 'POpen4', '0.1.4'
gem 'em-http-request', '1.0.3'
gem 'json', '1.4.3'
gem 'jasmine', '1.0.2.1'
gem 'jshintrb'
gem 'therubyracer'
gem 'listen'
# debugger matching the running MRI series
platforms :mri_18 do
gem "ruby-debug"
end
platforms :mri_19 do
gem "ruby-debug19"
end
end
# gems needed only to build the docs (skipped with `bundle install --without docs`)
group :docs do
gem 'github-markup'
gem 'RedCloth'
gem 'nokogiri'
end
GEM
remote: http://rubygems.org/
specs:
POpen4 (0.1.4)
Platform (>= 0.4.0)
open4
Platform (0.4.0)
RedCloth (4.2.7)
addressable (2.3.2)
archive-tar-minitar (0.5.2)
childprocess (0.2.0)
ffi (~> 1.0.6)
columnize (0.3.2)
cookiejar (0.3.0)
diff-lcs (1.1.2)
em-http-request (1.0.3)
addressable (>= 2.2.3)
cookiejar
em-socksify
eventmachine (>= 1.0.0.beta.4)
http_parser.rb (>= 0.5.3)
em-socksify (0.2.1)
eventmachine (>= 1.0.0.beta.4)
eventmachine (1.0.0)
execjs (1.4.0)
multi_json (~> 1.0)
ffi (1.0.9)
github-markup (0.7.1)
http_parser.rb (0.5.3)
jasmine (1.0.2.1)
json_pure (>= 1.4.3)
rack (>= 1.1)
rspec (>= 1.3.1)
selenium-webdriver (>= 0.1.3)
jshintrb (0.2.1)
execjs
multi_json (>= 1.3)
rake
json (1.4.3)
json_pure (1.5.3)
libv8 (3.3.10.4)
linecache (0.43)
linecache19 (0.5.11)
ruby_core_source (>= 0.1.4)
listen (0.5.3)
multi_json (1.3.7)
nokogiri (1.5.0)
open4 (1.0.1)
rack (1.3.2)
rake (0.8.7)
rspec (2.6.0)
rspec-core (~> 2.6.0)
rspec-expectations (~> 2.6.0)
rspec-mocks (~> 2.6.0)
rspec-core (2.6.4)
rspec-expectations (2.6.0)
diff-lcs (~> 1.1.2)
rspec-mocks (2.6.0)
ruby-debug (0.10.4)
columnize (>= 0.1)
ruby-debug-base (~> 0.10.4.0)
ruby-debug-base (0.10.4)
linecache (>= 0.3)
ruby-debug-base19 (0.11.24)
columnize (>= 0.3.1)
linecache19 (>= 0.5.11)
ruby_core_source (>= 0.1.4)
ruby-debug19 (0.11.6)
columnize (>= 0.3.1)
linecache19 (>= 0.5.11)
ruby-debug-base19 (>= 0.11.19)
ruby_core_source (0.1.4)
archive-tar-minitar (>= 0.5.2)
rubyzip (0.9.4)
selenium-webdriver (2.3.2)
childprocess (>= 0.1.9)
ffi (>= 1.0.7)
json_pure
rubyzip
therubyracer (0.10.2)
libv8 (~> 3.3.10)
PLATFORMS
ruby
DEPENDENCIES
POpen4 (= 0.1.4)
RedCloth
em-http-request (= 1.0.3)
github-markup
jasmine (= 1.0.2.1)
jshintrb
json (= 1.4.3)
listen
nokogiri
rake
ruby-debug
ruby-debug19
therubyracer
This diff is collapsed.
This diff is collapsed.
# Jasmine configuration hook; reopened here so project-specific overrides can
# be added. Intentionally empty for now.
module Jasmine
class Config
# Add your overrides or custom config code here
end
end
# Note - this is necessary for rspec2, which has removed the backtrace
module Jasmine
  class SpecBuilder
    # Overrides the gem's declare_spec so the example is registered without the
    # backtrace argument that rspec-2 no longer accepts. The dead
    # `backtrace = @example_locations[...]` lookup from the gem's original
    # implementation was removed (the value was never used).
    def declare_spec(parent, spec)
      me = self # captured: the example block runs in RSpec's example context
      example_name = spec["name"]
      @spec_ids << spec["id"]
      parent.it example_name, {} do
        me.report_spec(spec["id"])
      end
    end
  end
end
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment