Revert "Revert redis merge to fix bugs on master"
This reverts commit 30698d674b.
parent 3688b3982f
commit 8798f05ff7
34 changed files with 707 additions and 365 deletions
Gemfile | 6
@@ -48,10 +48,14 @@ gem 'mini_magick'
 gem 'aws'
 gem 'fastercsv', :require => false
 gem 'jammit'
+gem 'rest-client'
 #Backups
 gem "cloudfiles", :require => false

+#Queue
+gem 'resque'
+gem 'SystemTimer'

 group :test, :development do
   gem 'factory_girl_rails'
   gem 'ruby-debug19' if RUBY_VERSION.include? "1.9"
Gemfile.lock | 18
@@ -77,6 +77,7 @@ PATH
 GEM
   remote: http://rubygems.org/
   specs:
+    SystemTimer (1.2.1)
     abstract (1.0.0)
     actionmailer (3.0.1)
       actionpack (= 3.0.1)
@@ -296,6 +297,14 @@ GEM
       rake (>= 0.8.4)
       thor (~> 0.14.0)
     rake (0.8.7)
+    redis (2.1.1)
+    redis-namespace (0.8.0)
+      redis (< 3.0.0)
+    resque (1.10.0)
+      json (~> 1.4.6)
+      redis-namespace (~> 0.8.0)
+      sinatra (>= 0.9.2)
+      vegas (~> 0.1.2)
     rest-client (1.6.1)
       mime-types (>= 1.16)
     rspec (2.2.0)
@@ -330,6 +339,9 @@ GEM
       ffi (~> 0.6.3)
       json_pure
       rubyzip
+    sinatra (1.1.0)
+      rack (~> 1.1)
+      tilt (~> 1.1)
     subexec (0.0.4)
     systemu (1.2.0)
     term-ansicolor (1.0.5)
@@ -338,6 +350,7 @@ GEM
       eventmachine (>= 0.12.6)
       rack (>= 1.0.0)
     thor (0.14.6)
+    tilt (1.1)
     treetop (1.4.9)
       polyglot (>= 0.3.1)
     twitter (0.9.12)
@@ -347,6 +360,8 @@ GEM
       oauth (~> 0.4.3)
     tzinfo (0.3.23)
     uuidtools (2.1.1)
+    vegas (0.1.8)
+      rack (>= 1.0.0)
     warden (0.10.7)
       rack (>= 1.0.0)
     webmock (1.6.1)
@@ -360,6 +375,7 @@ PLATFORMS
   ruby

 DEPENDENCIES
+  SystemTimer
   addressable
   aws
   bson (= 1.1)
@@ -393,6 +409,8 @@ DEPENDENCIES
   omniauth
   pubsubhubbub
   rails (= 3.0.1)
+  resque
+  rest-client
   roxml!
   rspec (>= 2.0.0)
   rspec-instafail
Rakefile | 1
@@ -7,5 +7,6 @@

 require File.expand_path('../config/application', __FILE__)
 require 'rake'
+require 'resque/tasks'

 Diaspora::Application.load_tasks
@@ -116,14 +116,6 @@ class PeopleController < ApplicationController
     end
   end
   def webfinger(account, opts = {})
-    finger = EMWebfinger.new(account)
-    finger.on_person do |response|
-      if response.class == Person
-        response.socket_to_uid(current_user.id, opts)
-      else
-        require File.join(Rails.root,'lib/diaspora/websocket')
-        Diaspora::WebSocket.queue_to_user(current_user.id, {:class => 'people', :status => 'fail', :query => account, :response => response}.to_json)
-      end
-    end
+    Resque.enqueue(Jobs::SocketWebfinger, current_user.id, account, opts)
   end
 end
@@ -56,12 +56,7 @@ class PublicsController < ApplicationController
     end

     @user = person.owner
+    Resque.enqueue(Jobs::ReceiveSalmon, @user.id, params[:xml])
-    begin
-      @user.receive_salmon(params[:xml])
-    rescue Exception => e
-      Rails.logger.info("bad salmon: #{e.message}")
-    end

     render :nothing => true, :status => 200
   end
@@ -2,7 +2,7 @@
 # licensed under the Affero General Public License version 3 or later. See
 # the COPYRIGHT file.

-require File.join(Rails.root, 'lib/em-webfinger')
+require File.join(Rails.root, 'lib/webfinger')

 class RequestsController < ApplicationController
   before_filter :authenticate_user!
@@ -11,7 +11,7 @@ class HandleValidator < ActiveModel::Validator
 end

 class Comment
-  require File.join(Rails.root, 'lib/diaspora/websocket')
+  require File.join(Rails.root, 'lib/diaspora/web_socket')
   require File.join(Rails.root, 'lib/youtube_titles')
   include YoutubeTitles
   include MongoMapper::Document
app/models/jobs/receive_salmon.rb (new file) | 10
@@ -0,0 +1,10 @@
+module Jobs
+  class ReceiveSalmon
+    @queue = :receive
+    def self.perform(user_id, xml)
+      user = User.find(user_id)
+      user.receive_salmon(xml)
+    end
+  end
+end
app/models/jobs/socket_webfinger.rb (new file) | 15
@@ -0,0 +1,15 @@
+module Jobs
+  class SocketWebfinger
+    @queue = :receive
+    def self.perform(user_id, account, opts={})
+      finger = Webfinger.new(account)
+      begin
+        result = finger.fetch
+        result.socket_to_uid(user_id, opts)
+      rescue
+        Diaspora::WebSocket.queue_to_user(user_id, {:class => 'people', :status => 'fail', :query => account, :response => I18n.t('people.webfinger.fail')}.to_json)
+      end
+    end
+  end
+end
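Both jobs follow the plain Resque pattern: a class-level @queue name plus a self.perform. A minimal sketch of how they are exercised, taken from the enqueue calls this commit adds to PeopleController and PublicsController; the worker side shown here is only illustrative:

    # Controllers push work onto the :receive queue:
    Resque.enqueue(Jobs::SocketWebfinger, current_user.id, account, opts)
    Resque.enqueue(Jobs::ReceiveSalmon, @user.id, params[:xml])

    # A Resque worker later pops the payload and calls the class-level
    # perform with the same (JSON round-tripped) arguments, roughly:
    Jobs::ReceiveSalmon.perform(user.id, "<salmon xml>")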
@@ -8,7 +8,7 @@ class Person
   include MongoMapper::Document
   include ROXML
   include Encryptor::Public
-  require File.join(Rails.root, 'lib/diaspora/websocket')
+  require File.join(Rails.root, 'lib/diaspora/web_socket')
   include Diaspora::Socketable

   xml_accessor :_id
@@ -4,7 +4,7 @@

 class Post
   require File.join(Rails.root, 'lib/encryptable')
-  require File.join(Rails.root, 'lib/diaspora/websocket')
+  require File.join(Rails.root, 'lib/diaspora/web_socket')
   include MongoMapper::Document
   include ApplicationHelper
   include ROXML
@@ -30,7 +30,7 @@ class Retraction
     if self.type.constantize.find_by_id(post_id)
       unless Post.first(:diaspora_handle => person.diaspora_handle, :id => post_id)
         Rails.logger.info("event=retraction status=abort reason='no post found authored by retractor' sender=#{person.diaspora_handle} post_id=#{post_id}")
-        raise "#{person.inspect} is trying to retract a post that either doesn't exist or is not by them"
+        return
       end

       begin
chef/cookbooks/centos/files/default/redis.conf (new file) | 312
@@ -0,0 +1,312 @@
+# Redis configuration file example
+
+# Note on units: when memory size is needed, it is possible to specifiy
+# it in the usual form of 1k 5GB 4M and so forth:
+#
+# 1k => 1000 bytes
+# 1kb => 1024 bytes
+# 1m => 1000000 bytes
+# 1mb => 1024*1024 bytes
+# 1g => 1000000000 bytes
+# 1gb => 1024*1024*1024 bytes
+#
+# units are case insensitive so 1GB 1Gb 1gB are all the same.
+
+# By default Redis does not run as a daemon. Use 'yes' if you need it.
+# Note that Redis will write a pid file in /usr/local/var/run/redis.pid when daemonized.
+daemonize no
+
+# When running daemonized, Redis writes a pid file in /usr/local/var/run/redis.pid by
+# default. You can specify a custom pid file location here.
+pidfile /usr/local/var/run/redis.pid
+
+# Accept connections on the specified port, default is 6379
+port 6379
+
+# If you want you can bind a single interface, if the bind option is not
+# specified all the interfaces will listen for incoming connections.
+#
+# bind 127.0.0.1
+
+# Close the connection after a client is idle for N seconds (0 to disable)
+timeout 300
+
+# Set server verbosity to 'debug'
+# it can be one of:
+# debug (a lot of information, useful for development/testing)
+# verbose (many rarely useful info, but not a mess like the debug level)
+# notice (moderately verbose, what you want in production probably)
+# warning (only very important / critical messages are logged)
+loglevel verbose
+
+# Specify the log file name. Also 'stdout' can be used to force
+# Redis to log on the standard output. Note that if you use standard
+# output for logging but daemonize, logs will be sent to /dev/null
+logfile stdout
+
+# Set the number of databases. The default database is DB 0, you can select
+# a different one on a per-connection basis using SELECT <dbid> where
+# dbid is a number between 0 and 'databases'-1
+databases 16
+
+################################ SNAPSHOTTING #################################
+#
+# Save the DB on disk:
+#
+# save <seconds> <changes>
+#
+# Will save the DB if both the given number of seconds and the given
+# number of write operations against the DB occurred.
+#
+# In the example below the behaviour will be to save:
+# after 900 sec (15 min) if at least 1 key changed
+# after 300 sec (5 min) if at least 10 keys changed
+# after 60 sec if at least 10000 keys changed
+#
+# Note: you can disable saving at all commenting all the "save" lines.
+
+save 900 1
+save 300 10
+save 60 10000
+
+# Compress string objects using LZF when dump .rdb databases?
+# For default that's set to 'yes' as it's almost always a win.
+# If you want to save some CPU in the saving child set it to 'no' but
+# the dataset will likely be bigger if you have compressible values or keys.
+rdbcompression yes
+
+# The filename where to dump the DB
+dbfilename dump.rdb
+
+# The working directory.
+#
+# The DB will be written inside this directory, with the filename specified
+# above using the 'dbfilename' configuration directive.
+#
+# Also the Append Only File will be created inside this directory.
+#
+# Note that you must specify a directory here, not a file name.
+dir /usr/local/var/db/redis/
+
+################################# REPLICATION #################################
+
+# Master-Slave replication. Use slaveof to make a Redis instance a copy of
+# another Redis server. Note that the configuration is local to the slave
+# so for example it is possible to configure the slave to save the DB with a
+# different interval, or to listen to another port, and so on.
+#
+# slaveof <masterip> <masterport>
+
+# If the master is password protected (using the "requirepass" configuration
+# directive below) it is possible to tell the slave to authenticate before
+# starting the replication synchronization process, otherwise the master will
+# refuse the slave request.
+#
+# masterauth <master-password>
+
+################################## SECURITY ###################################
+
+# Require clients to issue AUTH <PASSWORD> before processing any other
+# commands. This might be useful in environments in which you do not trust
+# others with access to the host running redis-server.
+#
+# This should stay commented out for backward compatibility and because most
+# people do not need auth (e.g. they run their own servers).
+#
+# Warning: since Redis is pretty fast an outside user can try up to
+# 150k passwords per second against a good box. This means that you should
+# use a very strong password otherwise it will be very easy to break.
+#
+# requirepass foobared
+
+################################### LIMITS ####################################
+
+# Set the max number of connected clients at the same time. By default there
+# is no limit, and it's up to the number of file descriptors the Redis process
+# is able to open. The special value '0' means no limits.
+# Once the limit is reached Redis will close all the new connections sending
+# an error 'max number of clients reached'.
+#
+# maxclients 128
+
+# Don't use more memory than the specified amount of bytes.
+# When the memory limit is reached Redis will try to remove keys with an
+# EXPIRE set. It will try to start freeing keys that are going to expire
+# in little time and preserve keys with a longer time to live.
+# Redis will also try to remove objects from free lists if possible.
+#
+# If all this fails, Redis will start to reply with errors to commands
+# that will use more memory, like SET, LPUSH, and so on, and will continue
+# to reply to most read-only commands like GET.
+#
+# WARNING: maxmemory can be a good idea mainly if you want to use Redis as a
+# 'state' server or cache, not as a real DB. When Redis is used as a real
+# database the memory usage will grow over the weeks, it will be obvious if
+# it is going to use too much memory in the long run, and you'll have the time
+# to upgrade. With maxmemory after the limit is reached you'll start to get
+# errors for write operations, and this may even lead to DB inconsistency.
+#
+# maxmemory <bytes>
+
+############################## APPEND ONLY MODE ###############################
+
+# By default Redis asynchronously dumps the dataset on disk. If you can live
+# with the idea that the latest records will be lost if something like a crash
+# happens this is the preferred way to run Redis. If instead you care a lot
+# about your data and don't want to that a single record can get lost you should
+# enable the append only mode: when this mode is enabled Redis will append
+# every write operation received in the file appendonly.aof. This file will
+# be read on startup in order to rebuild the full dataset in memory.
+#
+# Note that you can have both the async dumps and the append only file if you
+# like (you have to comment the "save" statements above to disable the dumps).
+# Still if append only mode is enabled Redis will load the data from the
+# log file at startup ignoring the dump.rdb file.
+#
+# IMPORTANT: Check the BGREWRITEAOF to check how to rewrite the append
+# log file in background when it gets too big.
+
+appendonly no
+
+# The name of the append only file (default: "appendonly.aof")
+# appendfilename appendonly.aof
+
+# The fsync() call tells the Operating System to actually write data on disk
+# instead to wait for more data in the output buffer. Some OS will really flush
+# data on disk, some other OS will just try to do it ASAP.
+#
+# Redis supports three different modes:
+#
+# no: don't fsync, just let the OS flush the data when it wants. Faster.
+# always: fsync after every write to the append only log . Slow, Safest.
+# everysec: fsync only if one second passed since the last fsync. Compromise.
+#
+# The default is "everysec" that's usually the right compromise between
+# speed and data safety. It's up to you to understand if you can relax this to
+# "no" that will will let the operating system flush the output buffer when
+# it wants, for better performances (but if you can live with the idea of
+# some data loss consider the default persistence mode that's snapshotting),
+# or on the contrary, use "always" that's very slow but a bit safer than
+# everysec.
+#
+# If unsure, use "everysec".
+
+# appendfsync always
+appendfsync everysec
+# appendfsync no
+
+################################ VIRTUAL MEMORY ###############################
+
+# Virtual Memory allows Redis to work with datasets bigger than the actual
+# amount of RAM needed to hold the whole dataset in memory.
+# In order to do so very used keys are taken in memory while the other keys
+# are swapped into a swap file, similarly to what operating systems do
+# with memory pages.
+#
+# To enable VM just set 'vm-enabled' to yes, and set the following three
+# VM parameters accordingly to your needs.
+
+vm-enabled no
+# vm-enabled yes
+
+# This is the path of the Redis swap file. As you can guess, swap files
+# can't be shared by different Redis instances, so make sure to use a swap
+# file for every redis process you are running. Redis will complain if the
+# swap file is already in use.
+#
+# The best kind of storage for the Redis swap file (that's accessed at random)
+# is a Solid State Disk (SSD).
+#
+# *** WARNING *** if you are using a shared hosting the default of putting
+# the swap file under /tmp is not secure. Create a dir with access granted
+# only to Redis user and configure Redis to create the swap file there.
+vm-swap-file /tmp/redis.swap
+
+# vm-max-memory configures the VM to use at max the specified amount of
+# RAM. Everything that deos not fit will be swapped on disk *if* possible, that
+# is, if there is still enough contiguous space in the swap file.
+#
+# With vm-max-memory 0 the system will swap everything it can. Not a good
+# default, just specify the max amount of RAM you can in bytes, but it's
+# better to leave some margin. For instance specify an amount of RAM
+# that's more or less between 60 and 80% of your free RAM.
+vm-max-memory 0
+
+# Redis swap files is split into pages. An object can be saved using multiple
+# contiguous pages, but pages can't be shared between different objects.
+# So if your page is too big, small objects swapped out on disk will waste
+# a lot of space. If you page is too small, there is less space in the swap
+# file (assuming you configured the same number of total swap file pages).
+#
+# If you use a lot of small objects, use a page size of 64 or 32 bytes.
+# If you use a lot of big objects, use a bigger page size.
+# If unsure, use the default :)
+vm-page-size 32
+
+# Number of total memory pages in the swap file.
+# Given that the page table (a bitmap of free/used pages) is taken in memory,
+# every 8 pages on disk will consume 1 byte of RAM.
+#
+# The total swap size is vm-page-size * vm-pages
+#
+# With the default of 32-bytes memory pages and 134217728 pages Redis will
+# use a 4 GB swap file, that will use 16 MB of RAM for the page table.
+#
+# It's better to use the smallest acceptable value for your application,
+# but the default is large in order to work in most conditions.
+vm-pages 134217728
+
+# Max number of VM I/O threads running at the same time.
+# This threads are used to read/write data from/to swap file, since they
+# also encode and decode objects from disk to memory or the reverse, a bigger
+# number of threads can help with big objects even if they can't help with
+# I/O itself as the physical device may not be able to couple with many
+# reads/writes operations at the same time.
+#
+# The special value of 0 turn off threaded I/O and enables the blocking
+# Virtual Memory implementation.
+vm-max-threads 4
+
+############################### ADVANCED CONFIG ###############################
+
+# Glue small output buffers together in order to send small replies in a
+# single TCP packet. Uses a bit more CPU but most of the times it is a win
+# in terms of number of queries per second. Use 'yes' if unsure.
+glueoutputbuf yes
+
+# Hashes are encoded in a special way (much more memory efficient) when they
+# have at max a given numer of elements, and the biggest element does not
+# exceed a given threshold. You can configure this limits with the following
+# configuration directives.
+hash-max-zipmap-entries 64
+hash-max-zipmap-value 512
+
+# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in
+# order to help rehashing the main Redis hash table (the one mapping top-level
+# keys to values). The hash table implementation redis uses (see dict.c)
+# performs a lazy rehashing: the more operation you run into an hash table
+# that is rhashing, the more rehashing "steps" are performed, so if the
+# server is idle the rehashing is never complete and some more memory is used
+# by the hash table.
+#
+# The default is to use this millisecond 10 times every second in order to
+# active rehashing the main dictionaries, freeing memory when possible.
+#
+# If unsure:
+# use "activerehashing no" if you have hard latency requirements and it is
+# not a good thing in your environment that Redis can reply form time to time
+# to queries with 2 milliseconds delay.
+#
+# use "activerehashing yes" if you don't have such hard requirements but
+# want to free memory asap when possible.
+activerehashing yes
+
+################################## INCLUDES ###################################
+
+# Include one or more other config files here. This is useful if you
+# have a standard template that goes to all redis server but also need
+# to customize a few per-server settings. Include files can include
+# other files, so use this wisely.
+#
+# include /path/to/local.conf
+# include /path/to/other.conf
@@ -18,3 +18,4 @@ include_recipe "centos::image_magick"
 include_recipe "centos::mongo_db"
 include_recipe "common::main"
 include_recipe "centos::nginx"
+include_recipe "centos::redis"
chef/cookbooks/centos/recipes/redis.rb (new file) | 20
@@ -0,0 +1,20 @@
+
+execute "refresh yum" do
+  command "yum update -y"
+end
+
+execute "install redis" do
+  command "yum install -y redis"
+end
+
+cookbook_file "/usr/local/etc/redis.conf" do
+  source "redis.conf"
+end
+
+execute "change redis.conf permissions" do
+  command "chmod 755 /usr/local/etc/redis.conf"
+end
+
+execute "make the redis db directory" do
+  command "mkdir -p /usr/local/var/db/redis"
+end
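The recipe shells out for each step. A hedged sketch of the same steps using Chef's built-in resources (package, directory, and a mode on cookbook_file) — not what this commit does, just the more declarative equivalent under the assumption that a "redis" package is available from yum:

    package "redis"

    cookbook_file "/usr/local/etc/redis.conf" do
      source "redis.conf"
      mode "0755"
    end

    directory "/usr/local/var/db/redis" do
      recursive true
    end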
@@ -15,6 +15,8 @@
 -A RH-Firewall-1-INPUT -m state --state NEW -m tcp -p tcp --dport 80 -j ACCEPT
 #HTTPS
 -A RH-Firewall-1-INPUT -m state --state NEW -m tcp -p tcp --dport 443 -j ACCEPT
+#Resque-Web
+-A RH-Firewall-1-INPUT -m state --state NEW -m tcp -p tcp --dport 7894 -j ACCEPT
 #Websocket
 -A RH-Firewall-1-INPUT -m state --state NEW -m tcp -p tcp --dport 8080 -j ACCEPT
 #Crossdomain policy file for Flash sockets
@@ -49,9 +49,33 @@ execute "executable" do
   command "chmod -R 755 /service/magent"
 end

+execute "redis run" do
+  command "mkdir -p /service/redis && echo '#!/bin/sh' > /service/redis/run && echo 'cd /usr/sbin/ && exec /usr/sbin/redis-server /usr/local/etc/redis.conf' >> /service/redis/run"
+end
+execute "executable" do
+  command "chmod -R 755 /service/redis"
+end
+
 execute "nginx run" do
   command "mkdir -p /service/nginx && echo '#!/bin/sh' > /service/nginx/run && echo 'exec /usr/local/nginx/sbin/nginx' >> /service/nginx/run"
 end

 execute "executable" do
   command "chmod -R 755 /service/nginx"
 end
+
+execute "resque worker run" do
+  command "mkdir -p /service/resque_worker && echo '#!/bin/sh' > /service/resque_worker/run && echo 'cd /user/local/app/diaspora && RAILS_ENV=production QUEUE=* HOME=/usr/local/app/diaspora exec /usr/local/bin/rake resque:work' >> /service/resque_worker/run"
+end
+
+execute "executable" do
+  command "chmod -R 755 /service/resque_worker"
+end
+
+execute "resque web run" do
+  command "mkdir -p /service/resque_web && echo '#!/bin/sh' > /service/resque_web/run && echo 'RAILS_ENV=production HOME=/usr/local/app/diaspora exec resque-web -F' >> /service/resque_web/run"
+end
+
+execute "executable" do
+  command "chmod -R 755 /service/resque_web"
+end
@@ -36,6 +36,10 @@ http {
     <% end %>
   }
+
+  upstream resque_web {
+    server localhost:5678;
+  }

   server {
     listen 843;
@@ -51,6 +55,23 @@ http {

   }

+  server {
+    listen 7894;
+    server_name <%= @url %> www.<%= @url %>;
+
+    auth_basic "Restricted";
+    auth_basic_user_file htpasswd;
+
+    ssl on;
+    ssl_certificate /usr/local/nginx/conf/diaspora.crt;
+    ssl_certificate_key /usr/local/nginx/conf/diaspora.key;
+
+    location / {
+      proxy_set_header Host $http_host;
+      proxy_pass http://resque_web;
+    }
+  }
+
   server {
     listen 80;
     server_name <%= @url %> www.<%= @url %>;
@@ -67,6 +88,7 @@ http {
     ssl_certificate <%= @cert_location %>;
     ssl_certificate_key <%= @key_location %>;

+
     location / {
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -85,13 +107,13 @@ http {
       if (!-f $request_filename) {
         proxy_pass http://thin_cluster;
         break;
+      }
     }
-    }

     error_page 500 502 503 504 /50x.html;
     location = /50x.html {
       root html;
     }

   }
config/initializers/resque.rb (new file) | 4
@@ -0,0 +1,4 @@
+Dir[File.join(Rails.root, 'app', 'models', 'jobs', '*.rb')].each { |file| require file }
+#config = YAML::load(File.open("#{Rails.root}/config/redis.yml"))
+#Resque.redis = Redis.new(:host => config['host'], :port => config['port'])
+require 'resque'
@@ -6,7 +6,6 @@
 # See http://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points.

 en:
-
   settings: "Settings"
   profile: "Profile"
   account: "Account"
@@ -302,6 +301,8 @@ en:
     add_contact: "add contact"
     index:
       results_for: "search results for"
+    webfinger:
+      fail: "Sorry, we couldn't find %{handle}."
     show:
       no_posts: "no posts to display!"
       incoming_request: "You have an incoming request from this person."
config/redis.yml (new file) | 10
@@ -0,0 +1,10 @@
+---
+default:
+  host: localhost
+  port: 6379
+
+development:
+test:
+staging:
+production:
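config/initializers/resque.rb above ships the Redis wiring commented out, so Resque falls back to its default of localhost:6379, which matches the defaults in config/redis.yml. A sketch of what uncommenting it might look like; the per-environment lookup with a fall-back to the default block is an assumption, not something this commit does:

    # hypothetical variant of config/initializers/resque.rb
    require 'resque'
    config = YAML::load(File.open("#{Rails.root}/config/redis.yml"))
    settings = config[Rails.env] || config['default']   # assumed fallback to 'default'
    Resque.redis = Redis.new(:host => settings['host'], :port => settings['port'])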
@@ -1,28 +1,28 @@
-require File.join(Rails.root, 'lib/em-webfinger')
+require File.join(Rails.root, 'lib/webfinger')

 module Diaspora
   module UserModules
     module Receiving
       def receive_salmon salmon_xml
         salmon = Salmon::SalmonSlap.parse salmon_xml, self
-        webfinger = EMWebfinger.new(salmon.author_email)
-        webfinger.on_person { |response|
-          if response.is_a? Person
-            salmon_author = response
-            if salmon.verified_for_key?(salmon_author.public_key)
-              self.receive(salmon.parsed_data, salmon_author)
-            end
-          else
-            Rails.logger.info("event=receive status=abort recipient=#{self.diaspora_handle} sender=#{salmon.author_email} reason='#{response}'")
-          end
-        }
+        webfinger = Webfinger.new(salmon.author_email)
+        begin
+          salmon_author = webfinger.fetch
+        rescue Exception => e
+          Rails.logger.info("event=receive status=abort recipient=#{self.diaspora_handle} sender=#{salmon.author_email} reason='#{e.message}'")
+        end
+
+        if salmon.verified_for_key?(salmon_author.public_key)
+          self.receive(salmon.parsed_data, salmon_author)
+        else
+          Rails.logger.info("event=receive status=abort recipient=#{self.diaspora_handle} sender=#{salmon.author_email} reason='not_verified for key'")
+        end
       end

       def receive xml, salmon_author
         object = Diaspora::Parser.from_xml(xml)
         Rails.logger.info("event=receive status=start recipient=#{self.diaspora_handle} payload_type=#{object.class} sender=#{salmon_author.diaspora_handle}")

         if object.is_a?(Request)
           salmon_author.save
           object.sender_handle = salmon_author.diaspora_handle
@@ -39,19 +39,25 @@ module Diaspora
           return
         end

-        e = EMWebfinger.new(object.diaspora_handle)
+        e = Webfinger.new(object.diaspora_handle)

-        e.on_person do |person|
-          if person.class == Person
-            object.person = person if object.respond_to? :person=
-            unless object.is_a?(Request) || self.contact_for(salmon_author)
-              Rails.logger.info("event=receive status=abort reason='sender not connected to recipient' recipient=#{self.diaspora_handle} sender=#{salmon_author.diaspora_handle} payload_type=#{object.class}")
-              return
-            else
-              receive_object(object,person)
-              Rails.logger.info("event=receive status=complete recipient=#{self.diaspora_handle} sender=#{salmon_author.diaspora_handle} payload_type#{object.class}")
-              return object
-            end
+        begin
+          person = e.fetch
+        rescue Exception => e
+          Rails.logger.info("event=receive status=abort reason='#{e.message}' payload_type=#{object.class} recipient=#{self.diaspora_handle} sender=#{salmon_author.diaspora_handle}")
+          return
+        end
+
+        if person
+          object.person = person if object.respond_to? :person=
+
+          unless object.is_a?(Request) || self.contact_for(salmon_author)
+            Rails.logger.info("event=receive status=abort reason='sender not connected to recipient' recipient=#{self.diaspora_handle} sender=#{salmon_author.diaspora_handle} payload_type=#{object.class}")
+            return
+          else
+            receive_object(object,person)
+            Rails.logger.info("event=receive status=complete recipient=#{self.diaspora_handle} sender=#{salmon_author.diaspora_handle} payload_type#{object.class}")
+            return object
           end
         end
       end
@@ -102,7 +108,7 @@ module Diaspora
       def receive_comment comment

        commenter = comment.person

        unless comment.post.person == self.person || comment.verify_post_creator_signature
          Rails.logger.info("event=receive status=abort reason='comment signature not valid' recipient=#{self.diaspora_handle} sender=#{comment.post.person.diaspora_handle} payload_type=#{comment.class} post_id=#{comment.post_id}")
          return
@@ -136,10 +142,10 @@ module Diaspora
      def receive_post post
        #exsists locally, but you dont know about it
        #does not exsist locally, and you dont know about it

        #exsists_locally?
        #you know about it, and it is mutable
        #you know about it, and it is not mutable
        #
        on_pod = exsists_on_pod?(post)
        if on_pod && on_pod.diaspora_handle == post.diaspora_handle
@@ -1,127 +0,0 @@
-require File.join(Rails.root, 'lib/hcard')
-require File.join(Rails.root, 'lib/webfinger_profile')
-
-class EMWebfinger
-  TIMEOUT = 5
-  REDIRECTS = 3
-  OPTS = {:timeout => TIMEOUT, :redirects => REDIRECTS}
-  def initialize(account)
-    @account = account.strip.gsub('acct:','').to_s
-    @callbacks = []
-    @ssl = true
-    Rails.logger.info("event=EMWebfinger status=initialized target=#{account}")
-    # Raise an error if identifier has a port number
-    #raise "Identifier is invalid" if(@account.strip.match(/\:\d+$/))
-    # Raise an error if identifier is not a valid email (generous regexp)
-    #raise "Identifier is invalid" if !(@account=~ /^[a-zA-Z][\w\.-]*[a-zA-Z0-9]@[a-zA-Z0-9][\w\.-]*[a-zA-Z0-9]\.[a-zA-Z][a-zA-Z\.]*[a-zA-Z]$/)
-  end
-  def fetch
-    if @callbacks.empty?
-      Rails.logger.info("event=EMWebfinger status=abort target=#{@account} callbacks=empty")
-      raise 'you need to set a callback before calling fetch'
-    end
-    person = Person.by_account_identifier(@account)
-    if person
-      Rails.logger.info("event=EMWebfinger status=local target=#{@account}")
-      process_callbacks person
-    else
-      Rails.logger.info("event=EMWebfinger status=remote target=#{@account}")
-      get_xrd
-    end
-  end
-
-  def on_person(&block)
-    @callbacks << block
-    self.fetch
-  end
-
-  private
-
-  def get_xrd
-    http = EventMachine::HttpRequest.new(xrd_url).get OPTS
-    http.callback {
-      profile_url = webfinger_profile_url(http.response)
-      if profile_url
-        get_webfinger_profile(profile_url)
-      elsif @ssl
-        @ssl = false
-        get_xrd
-      else
-        process_callbacks I18n.t('webfinger.not_enabled', :account => @account)
-      end
-    }
-
-    http.errback {
-      if @ssl
-        @ssl = false
-        get_xrd
-      else
-        process_callbacks I18n.t('webfinger.xrd_fetch_failed', :account => @account)
-      end }
-  end
-
-
-  def get_webfinger_profile(profile_url)
-    http = EventMachine::HttpRequest.new(profile_url).get OPTS
-    http.callback{ make_person_from_webfinger(http.response) }
-    http.errback{ process_callbacks I18n.t('webfinger.fetch_failed', :profile_url => profile_url) }
-  end
-
-  def make_person_from_webfinger(webfinger_profile)
-    unless webfinger_profile.strip == ""
-
-      begin
-        wf_profile = WebfingerProfile.new(@account, webfinger_profile)
-
-        http = EventMachine::HttpRequest.new(wf_profile.hcard).get OPTS
-        http.callback{
-          begin
-            hcard = HCard.build http.response
-            p = Person.build_from_webfinger(wf_profile, hcard)
-            process_callbacks(p)
-          rescue
-            process_callbacks I18n.t 'webfinger.no_person_constructed'
-          end
-        }
-        http.errback{
-          process_callbacks I18n.t('webfinger.hcard_fetch_failed', :account => @account) }
-
-      rescue
-        process_callbacks "No person could be constructed from this webfinger profile."
-      end
-    end
-  end
-
-  def process_callbacks(person)
-    Rails.logger.info("event=EMWebfinger status=callbacks_started target=#{@account} response='#{person.is_a?(String) ? person : person.id}'")
-    @callbacks.each { |c|
-      begin
-        c.call(person)
-      rescue Exception => e
-        Rails.logger.info("event=EMWebfinger status=error_on_callback error='#{e.inspect}'")
-      end
-    }
-    Rails.logger.info("event=EMWebfinger status=complete target=#{@account}")
-  end
-
-  ##helpers
-  private
-
-  def webfinger_profile_url(xrd_response)
-    doc = Nokogiri::XML::Document.parse(xrd_response)
-    return nil if doc.namespaces["xmlns"] != "http://docs.oasis-open.org/ns/xri/xrd-1.0"
-    swizzle doc.at('Link[rel=lrdd]').attribute('template').value
-  end
-
-  def xrd_url
-    domain = @account.split('@')[1]
-    "http#{'s' if @ssl}://#{domain}/.well-known/host-meta"
-  end
-
-  def swizzle(template)
-    template.gsub '{uri}', @account
-  end
-end
lib/tasks/resque.rake (new file) | 3
@@ -0,0 +1,3 @@
+require 'resque/tasks'
+task "resque:setup" => :environment
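This glue makes resque:work depend on the Rails environment. Workers are started from the app root the way the runit script earlier in this commit does (RAILS_ENV=production QUEUE=* rake resque:work); a rough, illustrative sketch of what that invocation boils down to:

    # roughly equivalent to `QUEUE=* rake resque:work`
    require 'resque'
    worker = Resque::Worker.new('*')   # listen on every queue, including :receive
    worker.work(5)                     # poll interval in seconds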
lib/webfinger.rb (new file) | 102
@@ -0,0 +1,102 @@
+require File.join(Rails.root, 'lib/hcard')
+require File.join(Rails.root, 'lib/webfinger_profile')
+
+class Webfinger
+  class WebfingerFailedError < RuntimeError; end
+  TIMEOUT = 5
+  REDIRECTS = 3
+  OPTS = {:timeout => TIMEOUT, :redirects => REDIRECTS}
+  def initialize(account)
+    @account = account.strip.gsub('acct:','').to_s
+    @ssl = true
+    Rails.logger.info("event=webfinger status=initialized target=#{account}")
+  end
+
+  def fetch
+    person = Person.by_account_identifier(@account)
+    if person
+      Rails.logger.info("event=webfinger status=success route=local target=#{@account}")
+      return person
+    end
+
+    profile_url = get_xrd
+    webfinger_profile = get_webfinger_profile(profile_url)
+    fingered_person = make_person_from_webfinger(webfinger_profile)
+    if fingered_person
+      Rails.logger.info("event=webfinger status=success route=remote target=#{@account}")
+      fingered_person
+    else
+      Rails.logger.info("event=webfinger status=failure route=remote target=#{@account}")
+      raise WebfingerFailedError.new(@account)
+    end
+  end
+
+  private
+  def get_xrd
+    begin
+      http = RestClient.get xrd_url, OPTS
+
+      profile_url = webfinger_profile_url(http.body)
+      if profile_url
+        return profile_url
+      else
+        raise "no profile URL"
+      end
+    rescue Exception => e
+      if @ssl
+        @ssl = false
+        retry
+      else
+        raise e
+        raise I18n.t('webfinger.xrd_fetch_failed', :account => @account)
+      end
+    end
+  end
+
+  def get_webfinger_profile(profile_url)
+    begin
+      http = RestClient.get(profile_url, OPTS)
+    rescue
+      raise I18n.t('webfinger.fetch_failed', :profile_url => profile_url)
+    end
+    return http.body
+  end
+
+  def make_person_from_webfinger(webfinger_profile)
+    unless webfinger_profile.strip == ""
+      wf_profile = WebfingerProfile.new(@account, webfinger_profile)
+
+      begin
+        hcard = RestClient.get(wf_profile.hcard, OPTS)
+      rescue
+        return I18n.t('webfinger.hcard_fetch_failed', :account => @account)
+      end
+
+      card = HCard.build hcard.body
+      p = Person.build_from_webfinger(wf_profile, card)
+    end
+  end
+
+  ##helpers
+  private
+
+  def webfinger_profile_url(xrd_response)
+    doc = Nokogiri::XML::Document.parse(xrd_response)
+    return nil if doc.namespaces["xmlns"] != "http://docs.oasis-open.org/ns/xri/xrd-1.0"
+    swizzle doc.at('Link[rel=lrdd]').attribute('template').value
+  end
+
+  def xrd_url
+    domain = @account.split('@')[1]
+    "http#{'s' if @ssl}://#{domain}/.well-known/host-meta"
+  end
+
+  def swizzle(template)
+    template.gsub '{uri}', @account
+  end
+end
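Unlike the EventMachine class it replaces, Webfinger#fetch is synchronous: it returns a Person or raises. A hedged usage sketch (the handle is illustrative), mirroring how Jobs::SocketWebfinger and the Receiving module call it:

    finger = Webfinger.new("tom@tom.joindiaspora.com")
    begin
      person = finger.fetch   # local lookup first, then XRD + hCard over HTTP
    rescue Webfinger::WebfingerFailedError => e
      # callers in this commit rescue more broadly and just log the failure
      Rails.logger.info("webfinger failed for #{e.message}")
    end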
@@ -3,7 +3,7 @@
 # the COPYRIGHT file.

 require File.dirname(__FILE__) + '/../config/environment'
-require File.dirname(__FILE__) + '/../lib/diaspora/websocket'
+require File.dirname(__FILE__) + '/../lib/diaspora/web_socket'

 at_exit do
   begin
@@ -94,6 +94,13 @@ describe PeopleController do
     end
   end

+  describe '#webfinger' do
+    it 'enqueues a webfinger job' do
+      Resque.should_receive(:enqueue).with(Jobs::SocketWebfinger, user.id, user.diaspora_handle, anything).once
+      get :retrieve_remote, :diaspora_handle => user.diaspora_handle
+    end
+  end
+
   describe '#update' do
     context 'with a profile photo set' do
       before do
@@ -7,42 +7,20 @@ require 'spec_helper'

 describe PublicsController do
   render_views
-  let!(:user) { make_user }
-  let!(:user2) { make_user }
-  let!(:aspect1) { user.aspects.create(:name => "foo") }
-  let!(:aspect2) { user2.aspects.create(:name => "far") }
-  let!(:aspect2) { user2.aspects.create(:name => 'disciples') }
-  let!(:req) { user2.send_contact_request_to(user.person, aspect2) }
-  let!(:xml) { user2.salmon(req).xml_for(user.person) }
+  let(:user) { make_user }
   let(:person){Factory(:person)}

-  before do
-    sign_in :user, user
-  end
-
   describe '#receive' do
-    before do
-      EventMachine::HttpRequest.stub!(:new).and_return(FakeHttpRequest.new(:success))
-    end
-
-    context 'success cases' do
-      before do
-        @person_mock = mock()
-        @user_mock = mock()
-        @user_mock.stub!(:receive_salmon).and_return(true)
-        @person_mock.stub!(:owner_id).and_return(true)
-        @person_mock.stub!(:owner).and_return(@user_mock)
-        Person.stub!(:first).and_return(@person_mock)
-      end
+    let(:xml) { "<walruses></walruses>" }
+    context 'success cases' do
       it 'should 200 on successful receipt of a request' do
        post :receive, :id =>user.person.id, :xml => xml
        response.code.should == '200'
       end

-      it 'should have the xml processed as salmon on success' do
-        @user_mock.should_receive(:receive_salmon).and_return(true)
-        post :receive, :id => user.person.id, :xml => xml
+      it 'enqueues a receive job' do
+        Resque.should_receive(:enqueue).with(Jobs::ReceiveSalmon, user.id, xml).once
+        post :receive, :id =>user.person.id, :xml => xml
       end
     end

@@ -55,8 +33,8 @@ describe PublicsController do
       post :receive, :id => person.id, :xml => xml
       response.code.should == '404'
     end
-    end

+  end

   describe '#hcard' do
     it 'queries by person id' do
@@ -97,33 +75,4 @@ describe PublicsController do
       response.should be_not_found
     end
   end
-
-  context 'intergration tests that should not be in this file' do
-    describe 'contact requests' do
-      before do
-        req.delete
-        user2.reload
-        user2.pending_requests.count.should be 1
-      end
-
-      it 'should accept a post from another node and save the information' do
-        pending
-        message = user2.build_post(:status_message, :message => "hi")
-
-        connect_users(user, aspect1, user2, aspect2)
-
-        user.reload
-        user.visible_post_ids.include?(message.id).should be false
-
-        xml1 = user2.salmon(message).xml_for(user.person)
-
-        EM::run{
-          post :receive, :id => user.person.id, :xml => xml1
-          EM.stop
-        }
-        user.reload
-        user.visible_post_ids.include?(message.id).should be true
-      end
-    end
-  end
 end
|
|
@ -4,15 +4,15 @@
|
||||||
|
|
||||||
require 'spec_helper'
|
require 'spec_helper'
|
||||||
|
|
||||||
require File.join(Rails.root, 'lib/em-webfinger')
|
require File.join(Rails.root, 'lib/webfinger')
|
||||||
|
|
||||||
describe EMWebfinger do
|
describe Webfinger do
|
||||||
let(:user1) { make_user }
|
let(:user1) { make_user }
|
||||||
let(:user2) { make_user }
|
let(:user2) { make_user }
|
||||||
|
|
||||||
let(:account) {"foo@tom.joindiaspora.com"}
|
let(:account) {"foo@tom.joindiaspora.com"}
|
||||||
let(:person){ Factory(:person, :diaspora_handle => account)}
|
let(:person){ Factory(:person, :diaspora_handle => account)}
|
||||||
let(:finger){EMWebfinger.new(account)}
|
let(:finger){Webfinger.new(account)}
|
||||||
|
|
||||||
|
|
||||||
let(:good_request) { FakeHttpRequest.new(:success)}
|
let(:good_request) { FakeHttpRequest.new(:success)}
|
||||||
|
|
@ -32,58 +32,16 @@ describe EMWebfinger do
|
||||||
|
|
||||||
describe '#intialize' do
|
describe '#intialize' do
|
||||||
it 'sets account ' do
|
it 'sets account ' do
|
||||||
n = EMWebfinger.new("mbs348@gmail.com")
|
n = Webfinger.new("mbs348@gmail.com")
|
||||||
n.instance_variable_get(:@account).should_not be nil
|
n.instance_variable_get(:@account).should_not be nil
|
||||||
end
|
end
|
||||||
|
|
||||||
it 'should raise an error on an unresonable email' do
|
|
||||||
proc{
|
|
||||||
EMWebfinger.new("joe.valid.email@my-address.com")
|
|
||||||
}.should_not raise_error(RuntimeError, "Identifier is invalid")
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'should not allow port numbers' do
|
|
||||||
pending
|
|
||||||
proc{
|
|
||||||
EMWebfinger.new('eviljoe@diaspora.local:3000')
|
|
||||||
}.should raise_error(RuntimeError, "Identifier is invalid")
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'should set ssl as the default' do
|
it 'should set ssl as the default' do
|
||||||
foo = EMWebfinger.new(account)
|
foo = Webfinger.new(account)
|
||||||
foo.instance_variable_get(:@ssl).should be true
|
foo.instance_variable_get(:@ssl).should be true
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
||||||
describe '#on_person' do
|
|
||||||
it 'should set a callback' do
|
|
||||||
n = EMWebfinger.new("mbs@gmail.com")
|
|
||||||
n.stub(:fetch).and_return(true)
|
|
||||||
|
|
||||||
n.on_person{|person| 1+1}
|
|
||||||
n.instance_variable_get(:@callbacks).count.should be 1
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'should not blow up if the returned xrd is nil' do
|
|
||||||
http = FakeHttpRequest.new(:success)
|
|
||||||
fake_account = 'foo@example.com'
|
|
||||||
http.callbacks = ['']
|
|
||||||
EventMachine::HttpRequest.should_receive(:new).and_return(http)
|
|
||||||
n = EMWebfinger.new("foo@example.com")
|
|
||||||
|
|
||||||
n.on_person{|person|
|
|
||||||
person.should == "webfinger does not seem to be enabled for #{fake_account}'s host"
|
|
||||||
}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '#fetch' do
|
|
||||||
it 'should require a callback' do
|
|
||||||
proc{finger.fetch }.should raise_error "you need to set a callback before calling fetch"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'webfinger query chain processing' do
|
context 'webfinger query chain processing' do
|
||||||
describe '#webfinger_profile_url' do
|
describe '#webfinger_profile_url' do
|
||||||
it 'should parse out the webfinger template' do
|
it 'should parse out the webfinger template' do
|
||||||
|
|
@ -114,81 +72,31 @@ describe EMWebfinger do
|
||||||
context 'webfingering local people' do
|
context 'webfingering local people' do
|
||||||
it 'should return a person from the database if it matches its handle' do
|
it 'should return a person from the database if it matches its handle' do
|
||||||
person.save
|
person.save
|
||||||
EM.run do
|
finger.fetch.id.should == person.id
|
||||||
finger.on_person { |p|
|
|
||||||
p.should == person
|
|
||||||
EM.stop
|
|
||||||
}
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
    it 'should fetch a diaspora webfinger and make a person for them' do
-     good_request.callbacks = [diaspora_xrd, diaspora_finger, hcard_xml]
+     diaspora_xrd.stub!(:body).and_return(diaspora_xrd)
+     hcard_xml.stub!(:body).and_return(hcard_xml)
+     diaspora_finger.stub!(:body).and_return(diaspora_finger)
+     RestClient.stub!(:get).and_return(diaspora_xrd, diaspora_finger, hcard_xml)
      #new_person = Factory.build(:person, :diaspora_handle => "tom@tom.joindiaspora.com")
      # http://tom.joindiaspora.com/.well-known/host-meta
-     f = EMWebfinger.new("tom@tom.joindiaspora.com")
-
-     EventMachine::HttpRequest.should_receive(:new).exactly(3).times.and_return(good_request)
-
-     EM.run {
-       f.on_person{ |p|
-         p.valid?.should be true
-         EM.stop
-       }
-     }
+     f = Webfinger.new("tom@tom.joindiaspora.com").fetch
+     f.should be_valid
    end

    it 'should retry with http if https fails' do
-     good_request.callbacks = [nil, diaspora_xrd, diaspora_finger, hcard_xml]
+     f = Webfinger.new("tom@tom.joindiaspora.com")

-     #new_person = Factory.build(:person, :diaspora_handle => "tom@tom.joindiaspora.com")
-     # http://tom.joindiaspora.com/.well-known/host-meta
-     f = EMWebfinger.new("tom@tom.joindiaspora.com")
-
-     EventMachine::HttpRequest.should_receive(:new).exactly(4).times.and_return(good_request)
-
+     diaspora_xrd.stub!(:body).and_return(diaspora_xrd)
+     RestClient.should_receive(:get).twice.and_return(nil, diaspora_xrd)
      f.should_receive(:xrd_url).twice
-     EM.run {
-       f.on_person{ |p|
-         EM.stop
-       }
-     }
+     f.send(:get_xrd)
+     f.instance_variable_get(:@ssl).should == false
    end

-   it 'must try https first' do
-     single_request = FakeHttpRequest.new(:success)
-     single_request.callbacks = [diaspora_xrd]
-     good_request.callbacks = [diaspora_finger, hcard_xml]
-     EventMachine::HttpRequest.should_receive(:new).with("https://tom.joindiaspora.com/.well-known/host-meta").and_return(single_request)
-     EventMachine::HttpRequest.should_receive(:new).exactly(2).and_return(good_request)
-
-     f = EMWebfinger.new("tom@tom.joindiaspora.com")
-
-     EM.run {
-       f.on_person{ |p|
-         EM.stop
-       }
-     }
-   end
-
-   it 'should retry with http if https fails with an http error code' do
-     bad_request = FakeHttpRequest.new(:failure)
-
-     good_request.callbacks = [diaspora_xrd, diaspora_finger, hcard_xml]
-
-     EventMachine::HttpRequest.should_receive(:new).with("https://tom.joindiaspora.com/.well-known/host-meta").and_return(bad_request)
-     EventMachine::HttpRequest.should_receive(:new).exactly(3).and_return(good_request)
-
-     f = EMWebfinger.new("tom@tom.joindiaspora.com")
-
-     EM.run {
-       f.on_person{ |p|
-         EM.stop
-       }
-     }
-   end
    end
  end
-end

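The new expectations above swap the EventMachine-driven EMWebfinger (on_person callbacks fired inside EM.run) for a synchronous Webfinger object queried through RestClient. For orientation, here is a minimal sketch of the interface those specs assume; only the names that actually appear in the diff (new, fetch, xrd_url, get_xrd, the @ssl flag, WebfingerFailedError) are taken from it, and the method bodies are illustrative guesses rather than the implementation shipped in this commit.

# Sketch only -- bodies are assumptions inferred from the specs above.
class Webfinger
  class WebfingerFailedError < RuntimeError; end

  def initialize(account)
    @account = account
    @ssl = true                         # the specs assert ssl is the default
  end

  # Walks host-meta XRD -> webfinger profile -> hCard and returns a Person.
  def fetch
    xrd = get_xrd
    # ... parse xrd, follow the profile and hCard links, build a Person ...
  end

  private

  def xrd_url
    scheme = @ssl ? 'https' : 'http'
    host = @account.split('@').last
    "#{scheme}://#{host}/.well-known/host-meta"
  end

  def get_xrd
    response = RestClient.get(xrd_url) rescue nil
    return response if response
    if @ssl
      @ssl = false                      # fall back to plain http once
      get_xrd
    else
      raise WebfingerFailedError
    end
  end
end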
19  spec/models/jobs/receive_salmon_spec.rb  Normal file

@@ -0,0 +1,19 @@
+require 'spec/spec_helper'
+
+describe Jobs::ReceiveSalmon do
+  before do
+    @user = make_user
+    @xml = '<xml></xml>'
+    User.stub(:find){ |id|
+      if id == @user.id
+        @user
+      else
+        nil
+      end
+    }
+  end
+  it 'calls receive_salmon' do
+    @user.should_receive(:receive_salmon).with(@xml).once
+    Jobs::ReceiveSalmon.perform(@user.id, @xml)
+  end
+end
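This spec fixes the job's contract: Jobs::ReceiveSalmon.perform(user_id, xml) loads the user and hands the salmon envelope to User#receive_salmon. A sketch of a Resque job satisfying that contract is below; the @queue name is an assumption, the rest follows directly from the expectations above. It would presumably be enqueued with Resque.enqueue(Jobs::ReceiveSalmon, user.id, xml).

# Sketch inferred from the spec above; the queue name is a guess.
module Jobs
  class ReceiveSalmon
    @queue = :receive_salmon

    def self.perform(user_id, xml)
      user = User.find(user_id)
      user.receive_salmon(xml)
    end
  end
end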
48  spec/models/jobs/socket_webfinger.rb  Normal file

@@ -0,0 +1,48 @@
+require 'spec/spec_helper'
+
+describe Jobs::SocketWebfinger do
+  before do
+    @user = make_user
+    @account = "tom@tom.joindiaspora.com"
+  end
+  it 'Makes a Webfinger object' do
+    Webfinger.should_receive(:new).with(@account)
+    Jobs::SocketWebfinger.perform(@user.id, @account)
+  end
+  it 'Queries the target account' do
+    finger = mock()
+    Webfinger.stub(:new).and_return(finger)
+
+    finger.should_receive(:fetch).and_return(Factory.create(:person))
+    Jobs::SocketWebfinger.perform(@user.id, @account)
+  end
+  it 'Sockets the resulting person on success' do
+    finger = mock()
+    Webfinger.stub(:new).and_return(finger)
+    person = Factory.create(:person)
+    finger.stub(:fetch).and_return(person)
+
+    person.should_receive(:socket_to_uid).with(@user.id, {})
+    Jobs::SocketWebfinger.perform(@user.id, @account)
+  end
+  it 'Passes opts through on success' do
+    finger = mock()
+    Webfinger.stub(:new).and_return(finger)
+    person = Factory.create(:person)
+    finger.stub(:fetch).and_return(person)
+
+    opts = {:symbol => true}
+    person.should_receive(:socket_to_uid).with(@user.id, opts)
+    Jobs::SocketWebfinger.perform(@user.id, @account, opts)
+  end
+  it 'sockets failure message on failure' do
+    finger = mock()
+    Webfinger.stub(:new).and_return(finger)
+    finger.stub(:fetch).and_raise(Webfinger::WebfingerFailedError)
+
+    opts = {:class => 'people', :status => 'fail', :query => @account, :response => I18n.t('people.webfinger.fail')}.to_json
+    Diaspora::WebSocket.should_receive(:queue_to_user).with(@user.id, opts)
+    Jobs::SocketWebfinger.perform(@user.id, @account)
+
+  end
+end
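Read together, these examples describe the job's behaviour: webfinger the queried account, and on success socket the resulting person back to the requesting user (passing any opts through); on Webfinger::WebfingerFailedError, push a JSON failure payload over the websocket instead. A sketch consistent with those expectations follows; the @queue name is an assumption.

# Sketch inferred from the spec above; the queue name is a guess.
module Jobs
  class SocketWebfinger
    @queue = :socket_webfinger

    def self.perform(user_id, account, opts = {})
      person = Webfinger.new(account).fetch
      person.socket_to_uid(user_id, opts)
    rescue Webfinger::WebfingerFailedError
      Diaspora::WebSocket.queue_to_user(user_id, {
        :class    => 'people',
        :status   => 'fail',
        :query    => account,
        :response => I18n.t('people.webfinger.fail')
      }.to_json)
    end
  end
end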
@@ -15,22 +15,10 @@ describe User do
  let(:user3) { make_user }
  let(:aspect3) { user3.aspects.create(:name => 'heroes') }

  before do
    connect_users(user, aspect, user2, aspect2)
  end

  it 'should stream only one message to the everyone aspect when a multi-aspected contacts posts' do
    user.add_person_to_aspect(user2.person.id, user.aspects.create(:name => "villains").id)
    status = user2.post(:status_message, :message => "Users do things", :to => aspect2.id)
@@ -61,7 +49,8 @@ describe User do
  describe '#receive_salmon' do
    it 'should handle the case where the webfinger fails' do
-     Person.should_receive(:by_account_identifier).and_return("not a person")
+     pending "Write this to test #receive_salmon"
+     Webfinger.stub!(:fetch).and_return(nil)

      proc{
        user2.post :status_message, :message => "store this!", :to => aspect2.id

@@ -39,6 +39,13 @@ RSpec.configure do |config|
    end
  end
+
+module Resque
+  def enqueue(klass, *args)
+    true
+  end
+end
+
  ImageUploader.enable_processing = false
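The reopened Resque module turns enqueue into a no-op that returns true, so the suite never needs a running redis; this works because Resque exposes enqueue as a module-level method, which the redefinition shadows (an assumption about resque's internals, but consistent with how the new job specs above call .perform directly instead of going through the queue). Roughly, under this stub:

# Illustration only -- names reuse the job specs above.
Resque.enqueue(Jobs::ReceiveSalmon, user.id, xml)   #=> true, nothing is queued
Jobs::ReceiveSalmon.perform(user.id, xml)           # specs run the job body synchronously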