Implement archive import backend

This implements the archive import feature.

The feature is divided into two main subfeatures: archive validation and archive import.

Archive validation runs a set of validations on the input user archive. It can be used
without actually running the import, e.g. when a user wants to check an archive from the
frontend before importing it. Validators may add messages and modify the archive.

Validators are separated into two types: critical and non-critical validators.

If a critical validation fails, the archive can't be imported.

If non-critical validations fail, the archive can still be imported, but warning messages
are rendered.
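
A rough sketch of how this separation surfaces through the ArchiveValidator API added
below (the archive path here is made up for illustration):

    validator = ArchiveValidator.new(File.new("/tmp/user_archive.json", "r"))
    validator.validate

    validator.errors   # critical problems, the archive can't be imported
    validator.warnings # non-critical problems, import can proceed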

Validators may also change the archive contents, e.g. an entity that can't be imported may
be removed from the archive.

The validators' job is to take complexity away from the importer and to perform the
validations that are not implemented in other parts of the system, e.g. DB validations or
diaspora_federation entity validations.
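
A minimal sketch of what a hypothetical non-critical validator could look like under the
BaseValidator contract introduced here (the class name and the check are invented for
illustration):

    class ArchiveValidator
      class FollowedTagsValidator < BaseValidator
        private

        # #validate is called from BaseValidator#initialize; messages pushed here end up in
        # ArchiveValidator#warnings when the class is listed in NON_CRITICAL_VALIDATORS.
        def validate
          return if archive_hash.fetch("user").fetch("followed_tags", []).is_a?(Array)

          messages.push("followed_tags is not an array, ignoring it")
          archive_hash["user"]["followed_tags"] = []
          self.valid = false
        end
      end
    end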

The archive importer then takes the modified archive from the validator and imports it.

In order to encapsulate the high-level migration logic, a MigrationService is
introduced. MigrationService ties together ArchiveValidator, ArchiveImporter and
AccountMigration.
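
Roughly, the intended flow is (a sketch based on the rake task below; error handling is
omitted and the path/username are placeholders):

    service = MigrationService.new("/path/to/archive.json", "new_username")

    service.validate   # raises ArchiveValidationFailed or MigrationAlreadyExists on problems
    service.warnings   # non-critical validation messages

    service.perform!   # imports the archive; creates and performs the AccountMigration
                       # when the old person could be resolved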

A rake task is also introduced which podmins may use to run the archive import.
cmrd Senya 2019-04-26 18:41:27 +03:00
parent e3c05b5620
commit f85f167f50
56 changed files with 2764 additions and 18 deletions


@ -23,6 +23,7 @@ gem "diaspora_federation-rails", "0.2.5"
gem "acts_as_api", "1.0.1"
gem "json", "2.2.0"
gem "json-schema", "2.8.1"
gem "yajl-ruby", "1.4.1"
# Authentication


@ -773,6 +773,7 @@ GEM
will_paginate (3.1.7)
xpath (3.2.0)
nokogiri (~> 1.8)
yajl-ruby (1.4.1)
yard (0.9.18)
PLATFORMS
@ -921,6 +922,7 @@ DEPENDENCIES
versionist (= 1.7.0)
webmock (= 3.5.1)
will_paginate (= 3.1.7)
yajl-ruby (= 1.4.1)
BUNDLED WITH
1.17.3


@ -12,6 +12,7 @@ class AccountMigration < ApplicationRecord
after_create :lock_old_user!
attr_accessor :old_private_key
attr_writer :old_person_diaspora_id
def receive(*)
perform!
@ -29,15 +30,7 @@ class AccountMigration < ApplicationRecord
def perform!
raise "already performed" if performed?
validate_sender if locally_initiated?
ActiveRecord::Base.transaction do
account_deleter.tombstone_person_and_profile
account_deleter.close_user if user_left_our_pod?
account_deleter.tombstone_user if user_changed_id_locally?
update_all_references
end
tombstone_old_user_and_update_all_references if old_person
dispatch if locally_initiated?
dispatch_contacts if remotely_initiated?
update(completed_at: Time.zone.now)
@ -53,10 +46,20 @@ class AccountMigration < ApplicationRecord
# the new pod is informed about the migration as well.
def subscribers
new_user.profile.subscribers.remote.to_a.tap do |subscribers|
subscribers.push(old_person) if old_person.remote?
subscribers.push(old_person) if old_person&.remote?
end
end
# This method finds the newest person in the migration chain.
# If a person has migrated multiple times then #new_person may point to a closed account.
# In that case, in order to find the open account, we have to delegate the new_person call to the next
# account_migration instance in the chain.
def newest_person
return new_person if new_person.account_migration.nil?
new_person.account_migration.newest_person
end
private
# Normally pod initiates migration locally when the new user is local. Then the pod creates AccountMigration object
@ -71,7 +74,7 @@ class AccountMigration < ApplicationRecord
end
def old_user
old_person.owner
old_person&.owner
end
def new_user
@ -90,6 +93,16 @@ class AccountMigration < ApplicationRecord
old_user && new_user
end
def tombstone_old_user_and_update_all_references
ActiveRecord::Base.transaction do
account_deleter.tombstone_person_and_profile
account_deleter.close_user if user_left_our_pod?
account_deleter.tombstone_user if user_changed_id_locally?
update_all_references
end
end
# We need to resend the contacts of our pod's users for the remote new person so that the remote pod receives this
# contact information from the authoritative source.
def dispatch_contacts
@ -112,9 +125,16 @@ class AccountMigration < ApplicationRecord
end
end
def old_person_diaspora_id
old_person&.diaspora_handle || @old_person_diaspora_id
end
def ephemeral_sender
raise "can't build sender without old private key defined" if old_private_key.nil?
EphemeralUser.new(old_person.diaspora_handle, old_private_key)
if old_private_key.nil? || old_person_diaspora_id.nil?
raise "can't build sender without old private key and diaspora ID defined"
end
EphemeralUser.new(old_person_diaspora_id, old_private_key)
end
def validate_sender
@ -128,7 +148,7 @@ class AccountMigration < ApplicationRecord
def person_references
references = Person.reflections.reject {|key, _|
%w[profile owner notifications pod].include?(key)
%w[profile owner notifications pod account_migration].include?(key)
}
references.map {|key, value|


@ -57,6 +57,8 @@ class Person < ApplicationRecord
has_many :mentions, :dependent => :destroy
has_one :account_migration, foreign_key: :old_person_id, dependent: :nullify, inverse_of: :old_person
validate :owner_xor_pod
validate :other_person_with_same_guid, on: :create
validates :profile, :presence => true


@ -3,6 +3,7 @@
class Poll < ApplicationRecord
include Diaspora::Federated::Base
include Diaspora::Fields::Guid
include Diaspora::Federated::Fetchable
belongs_to :status_message
has_many :poll_answers, -> { order "id ASC" }, dependent: :destroy


@ -10,6 +10,7 @@ class Post < ApplicationRecord
include ApplicationHelper
include Diaspora::Federated::Base
include Diaspora::Federated::Fetchable
include Diaspora::Likeable
include Diaspora::Commentable


@ -0,0 +1,76 @@
# frozen_string_literal: true
class MigrationService
attr_reader :archive_path, :new_user_name
delegate :errors, :warnings, to: :archive_validator
def initialize(archive_path, new_user_name)
@archive_path = archive_path
@new_user_name = new_user_name
end
def validate
archive_validator.validate
raise ArchiveValidationFailed, errors.join("\n") if errors.any?
raise MigrationAlreadyExists if AccountMigration.where(old_person: old_person).any?
end
def perform!
find_or_create_user
import_archive
run_migration
end
# When the old person can't be resolved we still import the data but don't create & perform an AccountMigration instance
def only_import?
old_person.nil?
end
private
def find_or_create_user
archive_importer.user = User.find_by(username: new_user_name)
archive_importer.create_user(username: new_user_name, password: SecureRandom.hex) if archive_importer.user.nil?
end
def import_archive
archive_importer.import
end
def run_migration
account_migration.save
account_migration.perform!
end
def account_migration
@account_migration ||= AccountMigration.new(
old_person: old_person,
new_person: archive_importer.user.person,
old_private_key: archive_importer.serialized_private_key,
old_person_diaspora_id: archive_importer.archive_author_diaspora_id
)
end
def old_person
@old_person ||= Person.by_account_identifier(archive_validator.archive_author_diaspora_id)
end
def archive_importer
@archive_importer ||= ArchiveImporter.new(archive_validator.archive_hash)
end
def archive_validator
@archive_validator ||= ArchiveValidator.new(archive_file)
end
def archive_file
# TODO: archive is likely to be a .json.gz file
File.new(archive_path, "r")
end
class ArchiveValidationFailed < RuntimeError
end
class MigrationAlreadyExists < RuntimeError
end
end

lib/archive_importer.rb

@ -0,0 +1,117 @@
# frozen_string_literal: true
class ArchiveImporter
include ArchiveHelper
include Diaspora::Logging
attr_accessor :user
def initialize(archive_hash)
@archive_hash = archive_hash
end
def import
import_tag_followings
import_aspects
import_contacts
import_posts
import_relayables
import_subscriptions
import_others_relayables
end
def create_user(attr)
allowed_keys = %w[
email strip_exif show_community_spotlight_in_stream language disable_mail auto_follow_back
]
data = convert_keys(archive_hash["user"], allowed_keys)
data.merge!(
username: attr[:username],
password: attr[:password],
password_confirmation: attr[:password]
)
self.user = User.build(data)
user.save!
end
private
attr_reader :archive_hash
def import_contacts
import_collection(contacts, ContactImporter)
end
def set_auto_follow_back_aspect
name = archive_hash["user"]["auto_follow_back_aspect"]
return if name.nil?
aspect = user.aspects.find_by(name: name)
user.update(auto_follow_back_aspect: aspect) if aspect
end
def import_aspects
contact_groups.each do |group|
begin
user.aspects.create!(group.slice("name", "chat_enabled"))
rescue ActiveRecord::RecordInvalid => e
logger.warn "#{self}: #{e}"
end
end
set_auto_follow_back_aspect
end
def import_posts
import_collection(posts, PostImporter)
end
def import_relayables
import_collection(relayables, OwnRelayableImporter)
end
def import_others_relayables
import_collection(others_relayables, EntityImporter)
end
def import_collection(collection, importer_class)
collection.each do |object|
importer_class.new(object, user).import
end
end
def import_tag_followings
archive_hash.fetch("user").fetch("followed_tags", []).each do |tag_name|
begin
tag = ActsAsTaggableOn::Tag.find_or_create_by(name: tag_name)
user.tag_followings.create!(tag: tag)
rescue ActiveRecord::RecordInvalid => e
logger.warn "#{self}: #{e}"
end
end
end
def import_subscriptions
post_subscriptions.each do |post_guid|
post = Post.find_or_fetch_by(archive_author_diaspora_id, post_guid)
if post.nil?
logger.warn "#{self}: post with guid #{post_guid} not found, can't subscribe"
next
end
begin
user.participations.create!(target: post)
rescue ActiveRecord::RecordInvalid => e
logger.warn "#{self}: #{e}"
end
end
end
def convert_keys(hash, allowed_keys)
hash
.slice(*allowed_keys)
.symbolize_keys
end
def to_s
"#{self.class}:#{archive_author_diaspora_id}:#{user.diaspora_handle}"
end
end


@ -0,0 +1,45 @@
# frozen_string_literal: true
class ArchiveImporter
module ArchiveHelper
def posts
@posts ||= archive_hash.fetch("user").fetch("posts", [])
end
def relayables
@relayables ||= archive_hash.fetch("user").fetch("relayables", [])
end
def others_relayables
@others_relayables ||= archive_hash.fetch("others_data", {}).fetch("relayables", [])
end
def post_subscriptions
archive_hash.fetch("user").fetch("post_subscriptions", [])
end
def contacts
archive_hash.fetch("user").fetch("contacts", [])
end
def contact_groups
@contact_groups ||= archive_hash.fetch("user").fetch("contact_groups", [])
end
def archive_author_diaspora_id
@archive_author_diaspora_id ||= archive_hash.fetch("user").fetch("profile").fetch("entity_data").fetch("author")
end
def person
@person ||= Person.find_or_fetch_by_identifier(archive_author_diaspora_id)
end
def private_key
OpenSSL::PKey::RSA.new(serialized_private_key)
end
def serialized_private_key
archive_hash.fetch("user").fetch("private_key")
end
end
end


@ -0,0 +1,40 @@
# frozen_string_literal: true
class ArchiveImporter
class ContactImporter
include Diaspora::Logging
def initialize(json, user)
@json = json
@user = user
end
attr_reader :json
attr_reader :user
def import
@imported_contact = create_contact
add_to_aspects
rescue ActiveRecord::RecordInvalid => e
logger.warn "#{self}: #{e}"
end
private
def add_to_aspects
json.fetch("contact_groups_membership", []).each do |group_name|
aspect = user.aspects.find_by(name: group_name)
if aspect.nil?
logger.warn "#{self}: aspect \"#{group_name}\" is missing"
next
end
@imported_contact.aspects << aspect
end
end
def create_contact
person = Person.by_account_identifier(json.fetch("account_id"))
user.contacts.create!(person_id: person.id, sharing: false, receiving: json.fetch("receiving"))
end
end
end


@ -0,0 +1,30 @@
# frozen_string_literal: true
class ArchiveImporter
class EntityImporter
include ArchiveValidator::EntitiesHelper
include Diaspora::Logging
def initialize(json, user)
@json = json
@user = user
end
def import
self.persisted_object = Diaspora::Federation::Receive.perform(entity)
rescue DiasporaFederation::Entities::Signable::SignatureVerificationFailed,
DiasporaFederation::Discovery::InvalidDocument,
DiasporaFederation::Discovery::DiscoveryError,
ActiveRecord::RecordInvalid => e
logger.warn "#{self}: #{e}"
end
attr_reader :json
attr_reader :user
attr_accessor :persisted_object
def entity
entity_class.from_json(json)
end
end
end


@ -0,0 +1,31 @@
# frozen_string_literal: true
class ArchiveImporter
class OwnEntityImporter < EntityImporter
def import
substitute_author
super
rescue Diaspora::Federation::InvalidAuthor
return if real_author == old_author_id
logger.warn "#{self.class}: attempt to import an entity with guid \"#{guid}\" which belongs to #{real_author}"
end
private
def substitute_author
@old_author_id = entity_data["author"]
entity_data["author"] = user.diaspora_handle
end
attr_reader :old_author_id
def persisted_object
@persisted_object ||= (instance if real_author == old_author_id)
end
def real_author
instance.author.diaspora_handle
end
end
end


@ -0,0 +1,25 @@
# frozen_string_literal: true
class ArchiveImporter
class OwnRelayableImporter < OwnEntityImporter
def entity
fetch_parent(symbolized_entity_data)
entity_class.new(symbolized_entity_data)
end
private
def symbolized_entity_data
@symbolized_entity_data ||= entity_data.slice(*entity_class.class_props.keys.map(&:to_s)).symbolize_keys
end
# Copied over from DiasporaFederation::Entities::Relayable
def fetch_parent(data)
type = data.fetch(:parent_type) {
break entity_class::PARENT_TYPE if entity_class.const_defined?(:PARENT_TYPE)
}
entity = Diaspora::Federation::Mappings.model_class_for(type).find_by(guid: data.fetch(:parent_guid))
data[:parent] = Diaspora::Federation::Entities.related_entity(entity)
end
end
end


@ -0,0 +1,35 @@
# frozen_string_literal: true
class ArchiveImporter
class PostImporter < OwnEntityImporter
include Diaspora::Logging
def import
super
import_subscriptions if persisted_object
end
private
def substitute_author
super
return unless entity_type == "status_message"
entity_data["photos"].each do |photo|
photo["entity_data"]["author"] = user.diaspora_handle
end
end
def import_subscriptions
json.fetch("subscribed_users_ids", []).each do |diaspora_id|
begin
person = Person.find_or_fetch_by_identifier(diaspora_id)
person = person.account_migration.newest_person unless person.account_migration.nil?
next if person.closed_account?
# TODO: unless person.nil? import subscription: subscription import is not supported yet
rescue DiasporaFederation::Discovery::DiscoveryError
end
end
end
end
end

lib/archive_validator.rb

@ -0,0 +1,60 @@
# frozen_string_literal: true
require "yajl"
# ArchiveValidator checks the archive for errors. It also finds non-critical problems and fixes them in the archive
# hash so that the ArchiveImporter doesn't have to handle these issues. Non-critical problems found are indicated
# as warnings. It also performs the necessary data fetches where required.
class ArchiveValidator
include ArchiveImporter::ArchiveHelper
def initialize(archive)
@archive = archive
end
def validate
run_validators(CRITICAL_VALIDATORS, errors)
run_validators(NON_CRITICAL_VALIDATORS, warnings)
rescue KeyError => e
errors.push("Missing mandatory data: #{e}")
rescue Yajl::ParseError => e
errors.push("Bad JSON provided: #{e}")
end
def errors
@errors ||= []
end
def warnings
@warnings ||= []
end
def archive_hash
@archive_hash ||= Yajl::Parser.new.parse(archive)
end
CRITICAL_VALIDATORS = [
SchemaValidator,
AuthorPrivateKeyValidator
].freeze
NON_CRITICAL_VALIDATORS = [
ContactsValidator,
PostsValidator,
RelayablesValidator,
OthersRelayablesValidator
].freeze
private_constant :CRITICAL_VALIDATORS, :NON_CRITICAL_VALIDATORS
private
attr_reader :archive
def run_validators(list, messages)
list.each do |validator_class|
validator = validator_class.new(archive_hash)
messages.concat(validator.messages)
end
end
end


@ -0,0 +1,17 @@
# frozen_string_literal: true
class ArchiveValidator
class AuthorPrivateKeyValidator < BaseValidator
include Diaspora::Logging
def validate
return if person.nil?
return if person.serialized_public_key == private_key.public_key.export
messages.push("Private key in the archive doesn't match the known key of #{person.diaspora_handle}")
rescue DiasporaFederation::Discovery::DiscoveryError
logger.info "#{self}: Archive author couldn't be fetched (old home pod is down?), will continue with data"\
" import only"
end
end
end


@ -0,0 +1,27 @@
# frozen_string_literal: true
class ArchiveValidator
class BaseValidator
include ArchiveImporter::ArchiveHelper
attr_reader :archive_hash
def initialize(archive_hash)
@archive_hash = archive_hash
validate
end
def messages
@messages ||= []
end
def valid?
@valid.nil? ? messages.empty? : @valid
end
private
attr_writer :valid
def validate; end
end
end


@ -0,0 +1,16 @@
# frozen_string_literal: true
class ArchiveValidator
class CollectionValidator < BaseValidator
# Runs validations over each element in the collection and removes every element
# that fails them. Any messages produced by the entity_validator are
# concatenated to the messages of the CollectionValidator instance.
def validate
collection.keep_if do |item|
subvalidator = entity_validator.new(archive_hash, item)
messages.concat(subvalidator.messages)
subvalidator.valid?
end
end
end
end


@ -0,0 +1,41 @@
# frozen_string_literal: true
class ArchiveValidator
class ContactValidator < BaseValidator
def initialize(archive_hash, contact)
@contact = contact
super(archive_hash)
end
private
def validate
handle_migrant_contact
self.valid = account_open?
rescue DiasporaFederation::Discovery::DiscoveryError => e
messages.push("#{self.class}: failed to fetch person #{diaspora_id}: #{e}")
self.valid = false
end
attr_reader :contact
def diaspora_id
contact.fetch("account_id")
end
def handle_migrant_contact
return if person.account_migration.nil?
contact["account_id"] = person.account_migration.newest_person.diaspora_handle
@person = nil
end
def person
@person ||= Person.find_or_fetch_by_identifier(diaspora_id)
end
def account_open?
!person.closed_account? || (messages.push("#{self.class}: account #{diaspora_id} is closed") && false)
end
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
class ArchiveValidator
class ContactsValidator < CollectionValidator
def collection
contacts
end
def entity_validator
ContactValidator
end
end
end


@ -0,0 +1,35 @@
# frozen_string_literal: true
class ArchiveValidator
module EntitiesHelper
private
def instance
@instance ||= model_class.find_by(guid: guid)
end
def entity_type
json.fetch("entity_type")
end
def entity_data
json.fetch("entity_data")
end
def model_class
@model_class ||= Diaspora::Federation::Mappings.model_class_for(entity_type.camelize)
end
def entity_class
DiasporaFederation::Entity.entity_class(entity_type)
end
def guid
@guid ||= entity_data.fetch("guid")
end
def to_s
"#{entity_class.class_name}:#{guid}"
end
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
class ArchiveValidator
class OthersRelayablesValidator < CollectionValidator
def collection
others_relayables
end
def entity_validator
RelayableValidator
end
end
end


@ -0,0 +1,19 @@
# frozen_string_literal: true
class ArchiveValidator
class OwnRelayableValidator < RelayableValidator
private
def post_find_by_guid(guid)
super || by_guid(Post, guid)
end
def post_find_by_poll_guid(guid)
super || by_guid(Poll, guid)&.status_message
end
def by_guid(klass, guid)
klass.find_or_fetch_by(archive_author_diaspora_id, guid)
end
end
end


@ -0,0 +1,22 @@
# frozen_string_literal: true
class ArchiveValidator
class PostValidator < BaseValidator
include EntitiesHelper
def initialize(archive_hash, post)
@json = post
super(archive_hash)
end
private
def validate
return unless entity_type == "reshare" && entity_data["root_guid"].nil?
messages.push("reshare #{self} doesn't have a root, ignored")
end
attr_reader :json
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
class ArchiveValidator
class PostsValidator < CollectionValidator
def collection
posts
end
def entity_validator
PostValidator
end
end
end


@ -0,0 +1,66 @@
# frozen_string_literal: true
class ArchiveValidator
# We have to validate relayables before import because during import we won't be able to fetch the parent anymore,
# since the parent author will point to ourselves.
class RelayableValidator < BaseValidator
include EntitiesHelper
def initialize(archive_hash, relayable)
@relayable = relayable
super(archive_hash)
end
private
def validate
self.valid = parent_present?
end
attr_reader :relayable
alias json relayable
# TODO: use diaspora federation to fetch parent where possible
# For own relayables we could just use RelatedEntity.fetch;
# For others' relayables we should check the present "own posts" first, and then if the target post is missing from
# there we could try to fetch it with RelatedEntity.fetch.
# Common methods used by subclasses:
def missing_parent_message
messages.push("Parent entity for #{self} is missing. Impossible to import, ignoring.")
end
def parent_present?
parent.present? || (missing_parent_message && false)
end
def parent
@parent ||= find_parent
end
def find_parent
if entity_type == "poll_participation"
post_find_by_poll_guid(parent_guid)
else
post_find_by_guid(parent_guid)
end
end
def parent_guid
entity_data.fetch("parent_guid")
end
def post_find_by_guid(guid)
posts.find {|post|
post.fetch("entity_data").fetch("guid") == guid
}
end
def post_find_by_poll_guid(guid)
posts.find {|post|
post.fetch("entity_data").fetch("poll", nil)&.fetch("entity_data", nil)&.fetch("guid", nil) == guid
}
end
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
class ArchiveValidator
class RelayablesValidator < CollectionValidator
def collection
relayables
end
def entity_validator
OwnRelayableValidator
end
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
class ArchiveValidator
class SchemaValidator < BaseValidator
JSON_SCHEMA = "lib/schemas/archive-format.json"
def validate
return if JSON::Validator.validate(JSON_SCHEMA, archive_hash)
messages.push("Archive schema validation failed")
end
end
end


@ -0,0 +1,21 @@
# frozen_string_literal: true
module Diaspora
module Federated
module Fetchable
extend ActiveSupport::Concern
module ClassMethods
def find_or_fetch_by(diaspora_id, guid)
instance = find_by(guid: guid)
return instance if instance.present?
DiasporaFederation::Federation::Fetcher.fetch_public(diaspora_id, to_s, guid)
find_by(guid: guid)
rescue DiasporaFederation::Federation::Fetcher::NotFetchable
nil
end
end
end
end
end

lib/tasks/accounts.rake

@ -0,0 +1,44 @@
# frozen_string_literal: true
namespace :accounts do
desc "Perform migration"
task :migration, %i[archive_path new_user_name] => :environment do |_t, args|
puts "Account migration is requested"
args = %i[archive_path new_user_name].map {|name| [name, args[name]] }.to_h
process_arguments(args)
begin
service = MigrationService.new(args[:archive_path], args[:new_user_name])
service.validate
puts "Warnings:\n#{service.warnings}\n-----" if service.warnings.any?
if service.only_import?
puts "Warning: Archive owner is not fetchable. Proceeding with data import, but account migration record "\
"won't be created"
end
print "Do you really want to execute the archive import? Note: this is irreversible! [y/N]: "
next unless $stdin.gets.strip.casecmp?("y")
start_time = Time.now.getlocal
service.perform!
puts service.only_import? ? "Data import complete!" : "Data import and migration complete!"
puts "Migration took #{Time.now.getlocal - start_time} seconds"
rescue MigrationService::ArchiveValidationFailed => exception
puts "Errors in the archive found:\n#{exception.message}\n-----"
rescue MigrationService::MigrationAlreadyExists
puts "Migration record already exists for the user, can't continue"
end
end
def process_arguments(args)
if args[:archive_path].nil?
print "Enter the archive path: "
args[:archive_path] = $stdin.gets.strip
end
if args[:new_user_name].nil?
print "Enter the new user name: "
args[:new_user_name] = $stdin.gets.strip
end
puts "Archive path: #{args[:archive_path]}"
puts "New username: #{args[:new_user_name]}"
end
end


@ -0,0 +1,13 @@
# frozen_string_literal: true
def expect_person_fetch(diaspora_id, public_key)
expect(DiasporaFederation::Discovery::Discovery).to receive(:new).with(diaspora_id) {
double.tap {|instance|
expect(instance).to receive(:fetch_and_save) {
attributes = {diaspora_handle: diaspora_id}
attributes[:serialized_public_key] = public_key if public_key.present?
FactoryGirl.create(:person, attributes)
}
}
}
end


@ -0,0 +1,54 @@
# frozen_string_literal: true
require "integration/archive_shared"
describe ArchiveValidator do
let(:json_file) { StringIO.new(json_string) }
let(:archive_validator) { ArchiveValidator.new(json_file) }
context "without known archive author" do
let(:private_key) { OpenSSL::PKey::RSA.generate(1024) }
let(:archive_author) { "user@oldpod.tld" }
let(:json_string) { <<~JSON }
{
"user": {
"username": "old_user",
"email": "mail@example.com",
"private_key": #{private_key.export.dump},
"profile": {
"entity_type": "profile",
"entity_data": {
"author": "#{archive_author}"
}
},
"contacts": [],
"contact_groups": [],
"post_subscriptions": [],
"posts": [],
"relayables": []
},
"others_data": {
"relayables": []
},
"version": "2.0"
}
JSON
it "fetches author" do
expect_person_fetch(archive_author, private_key.public_key.export)
archive_validator.validate
expect(archive_validator.warnings).to be_empty
expect(archive_validator.errors).to be_empty
end
end
context "when archive doesn't contain mandatory data" do
let(:json_string) { {}.to_json }
it "contains error" do
archive_validator.validate
expect(archive_validator.errors).to include('Missing mandatory data: key not found: "user"')
end
end
end


@ -0,0 +1,395 @@
# frozen_string_literal: true
require "integration/federation/federation_helper"
require "integration/archive_shared"
describe MigrationService do
let(:old_pod_hostname) { "originalhomepod.tld" }
let(:archive_author) { "previous_username@#{old_pod_hostname}" }
let(:archive_private_key) { OpenSSL::PKey::RSA.generate(1024) }
let(:contact1_diaspora_id) { known_contact_person.diaspora_handle }
let(:contact2_diaspora_id) { Fabricate.sequence(:diaspora_id) }
let(:unknown_subscription_guid) { UUID.generate(:compact) }
let(:existing_subscription_guid) { UUID.generate(:compact) }
let(:reshare_entity) { Fabricate(:reshare_entity, author: archive_author) }
let(:reshare_entity_with_no_root) {
Fabricate(:reshare_entity, author: archive_author, root_guid: nil, root_author: nil)
}
let(:unknown_status_message_entity) { Fabricate(:status_message_entity, author: archive_author, public: false) }
let(:known_status_message_entity) { Fabricate(:status_message_entity, author: archive_author, public: false) }
let(:colliding_status_message_entity) { Fabricate(:status_message_entity, author: archive_author) }
let(:status_message_with_poll_entity) {
Fabricate(:status_message_entity,
author: archive_author,
poll: Fabricate(:poll_entity))
}
let(:status_message_with_location_entity) {
Fabricate(:status_message_entity,
author: archive_author,
location: Fabricate(:location_entity))
}
let(:status_message_with_photos_entity) {
Fabricate(:status_message_entity,
author: archive_author,
photos: [
Fabricate(:photo_entity, author: archive_author),
Fabricate(:photo_entity, author: archive_author)
])
}
let(:comment_entity) {
Fabricate(:comment_entity, author: archive_author, author_signature: "ignored XXXXXXXXXXXXXXXXXXXXXXXXXXX")
}
let(:like_entity) {
Fabricate(:like_entity,
author: archive_author,
author_signature: "ignored XXXXXXXXXXXXXXXXXXXXXXXXXXX",
parent_guid: FactoryGirl.create(:status_message).guid)
}
let(:poll_participation_entity) {
poll = FactoryGirl.create(:status_message_with_poll).poll
Fabricate(:poll_participation_entity,
author: archive_author,
author_signature: "ignored XXXXXXXXXXXXXXXXXXXXXXXXXXX",
poll_answer_guid: poll.poll_answers.first.guid,
parent_guid: poll.guid)
}
let(:unknown_poll_guid) { UUID.generate(:compact) }
let(:unknown_poll_answer_guid) { UUID.generate(:compact) }
let(:poll_participation_entity_unknown_root) {
Fabricate(:poll_participation_entity,
author: archive_author,
author_signature: "ignored XXXXXXXXXXXXXXXXXXXXXXXXXXX",
poll_answer_guid: unknown_poll_answer_guid,
parent_guid: unknown_poll_guid)
}
let(:others_comment_entity) {
data = Fabricate.attributes_for(:comment_entity,
author: remote_user_on_pod_b.diaspora_handle,
parent_guid: unknown_status_message_entity.guid)
data[:author_signature] = Fabricate(:comment_entity, data).sign_with_key(remote_user_on_pod_b.encryption_key)
Fabricate(:comment_entity, data)
}
let(:post_subscriber) { FactoryGirl.create(:person) }
let(:known_contact_person) { FactoryGirl.create(:person) }
let!(:collided_status_message) { FactoryGirl.create(:status_message, guid: colliding_status_message_entity.guid) }
let!(:collided_like) { FactoryGirl.create(:like, guid: like_entity.guid) }
let!(:reshare_root_author) { FactoryGirl.create(:person, diaspora_handle: reshare_entity.root_author) }
# This is for testing migrated contacts handling
let(:account_migration) { FactoryGirl.create(:account_migration).tap(&:perform!) }
let(:migrated_contact_diaspora_id) { account_migration.old_person.diaspora_handle }
let(:migrated_contact_new_diaspora_id) { account_migration.new_person.diaspora_handle }
let(:posts_in_archive) {
[
reshare_entity,
unknown_status_message_entity,
known_status_message_entity,
reshare_entity_with_no_root,
colliding_status_message_entity,
status_message_with_poll_entity,
status_message_with_location_entity,
status_message_with_photos_entity
]
}
let(:posts_in_archive_json) {
posts = posts_in_archive.map {|post|
post.to_json.as_json
}
posts[0]["subscribed_pods_uris"] = []
posts[1]["subscribed_users_ids"] = [post_subscriber.diaspora_handle]
posts[2]["subscribed_users_ids"] = [post_subscriber.diaspora_handle]
posts[3]["subscribed_pods_uris"] = []
posts[4]["subscribed_pods_uris"] = []
posts[5]["subscribed_pods_uris"] = []
posts[6]["subscribed_pods_uris"] = []
posts[7]["subscribed_pods_uris"] = []
posts.to_json
}
let(:archive_json) { <<~JSON }
{
"user": {
"username": "previous_username",
"email": "mail@example.com",
"private_key": #{archive_private_key.export.dump},
"profile": {
"entity_type": "profile",
"entity_data": {
"author": "#{archive_author}"
}
},
"contacts": [
{
"sharing": true,
"receiving": false,
"following": true,
"followed": false,
"account_id": "#{contact1_diaspora_id}",
"contact_groups_membership": ["Family"]
},
{
"sharing": true,
"receiving": true,
"following": true,
"followed": true,
"account_id": "#{migrated_contact_diaspora_id}",
"contact_groups_membership": ["Family"]
},
{
"sharing": true,
"receiving": true,
"following": true,
"followed": true,
"account_id": "#{contact2_diaspora_id}",
"contact_groups_membership": ["Family"]
}
],
"contact_groups": [
{"name":"Friends","chat_enabled":true},
{"name":"Friends","chat_enabled":false}
],
"post_subscriptions": [
"#{unknown_subscription_guid}",
"#{existing_subscription_guid}"
],
"posts": #{posts_in_archive_json},
"relayables": [
#{comment_entity.to_json.as_json.to_json},
#{like_entity.to_json.as_json.to_json},
#{poll_participation_entity.to_json.as_json.to_json},
#{poll_participation_entity_unknown_root.to_json.as_json.to_json}
]
},
"others_data": {
"relayables": [
#{others_comment_entity.to_json.as_json.to_json}
]
},
"version": "2.0"
}
JSON
def expect_reshare_root_fetch(root_author, root_guid)
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(root_author.diaspora_handle, "Post", root_guid) {
FactoryGirl.create(:status_message, guid: root_guid, author: root_author, public: true)
}
end
def expect_relayable_parent_fetch(relayable_author, parent_guid, parent_type="Post", &block)
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(relayable_author, parent_type, parent_guid, &block)
end
let(:new_username) { "newuser" }
let(:new_user_handle) { "#{new_username}@#{AppConfig.bare_pod_uri}" }
let(:archive_file) { Tempfile.new("archive") }
def setup_validation_time_expectations
expect_person_fetch(contact2_diaspora_id, nil)
# This is expected to be called during relayable validation
expect_relayable_parent_fetch(archive_author, comment_entity.parent_guid) {
FactoryGirl.create(:status_message, guid: comment_entity.parent_guid)
}
expect_relayable_parent_fetch(archive_author, unknown_poll_guid, "Poll") {
FactoryGirl.create(
:poll_answer,
poll: FactoryGirl.create(:poll, guid: unknown_poll_guid),
guid: unknown_poll_answer_guid
)
}
end
before do
archive_file.write(archive_json)
archive_file.close
allow_callbacks(
%i[queue_public_receive fetch_related_entity fetch_person_url_to fetch_public_key receive_entity
fetch_private_key]
)
end
shared_examples "imports archive" do
it "imports archive" do
expect_relayable_parent_fetch(archive_author, unknown_subscription_guid) {
FactoryGirl.create(:status_message, guid: unknown_subscription_guid)
}
expect_reshare_root_fetch(reshare_root_author, reshare_entity.root_guid)
service = MigrationService.new(archive_file.path, new_username)
service.validate
expect(service.warnings).to eq(
["reshare Reshare:#{reshare_entity_with_no_root.guid} doesn't have a root, ignored"]
)
service.perform!
user = User.find_by(username: new_username)
expect(user).not_to be_nil
unless Person.by_account_identifier(archive_author).nil?
expect(AccountMigration.where(new_person: user.person).any?).to be_truthy
existing_contact.reload
expect(existing_contact.person).to eq(user.person)
expect(existing_contact.sharing).to be_truthy
expect(existing_contact.receiving).to be_truthy
end
status_message = StatusMessage.find_by(guid: unknown_status_message_entity.guid)
expect(status_message.author).to eq(user.person)
# TODO: rewrite this expectation when new subscription implementation is there
# expect(status_message.participants).to include(post_subscriber)
status_message = StatusMessage.find_by(guid: known_status_message_entity.guid)
expect(status_message.author).to eq(user.person)
# TODO: rewrite this expectation when new subscription implementation is there
# expect(status_message.participants).to include(post_subscriber)
status_message = StatusMessage.find_by(guid: status_message_with_poll_entity.guid)
expect(status_message.author).to eq(user.person)
poll = status_message.poll
expect(poll).not_to be_nil
expect(poll.guid).to eq(status_message_with_poll_entity.poll.guid)
expect(poll.question).to eq(status_message_with_poll_entity.poll.question)
expect(poll.poll_answers.pluck(:answer, :guid)).to eq(
status_message_with_poll_entity.poll.poll_answers.map {|answer| [answer.answer, answer.guid] }
)
status_message = StatusMessage.find_by(guid: status_message_with_location_entity.guid)
expect(status_message.author).to eq(user.person)
expect(status_message.location.address).to eq(status_message_with_location_entity.location.address)
expect(status_message.location.lat).to eq(status_message_with_location_entity.location.lat)
expect(status_message.location.lng).to eq(status_message_with_location_entity.location.lng)
status_message = StatusMessage.find_by(guid: status_message_with_photos_entity.guid)
expect(status_message.author).to eq(user.person)
expect(
status_message.photos.pluck(:guid, :text, :remote_photo_path, :remote_photo_name, :width, :height)
).to match_array(
status_message_with_photos_entity.photos.map {|photo|
[photo.guid, photo.text, photo.remote_photo_path, photo.remote_photo_name, photo.width, photo.height]
}
)
comment = Comment.find_by(guid: comment_entity.guid)
expect(comment.author).to eq(user.person)
# Here we're testing the case when the like in the archive has the guid colliding with another known like
like = Like.find_by(guid: like_entity.guid)
expect(like.author).not_to eq(user.person)
contact = user.contacts.find_by(person: Person.by_account_identifier(contact1_diaspora_id))
expect(contact).not_to be_nil
expect(contact.sharing).to be_falsey
expect(contact.receiving).to be_falsey
contact = user.contacts.find_by(person: Person.by_account_identifier(contact2_diaspora_id))
expect(contact).not_to be_nil
expect(contact.sharing).to be_falsey
expect(contact.receiving).to be_truthy
contact = user.contacts.find_by(person: Person.by_account_identifier(migrated_contact_new_diaspora_id))
expect(contact).not_to be_nil
expect(contact.sharing).to be_falsey
expect(contact.receiving).to be_truthy
aspect = user.aspects.find_by(name: "Friends")
expect(aspect).not_to be_nil
expect(aspect.chat_enabled).to be_truthy
poll_participation = PollParticipation.find_by(author: user.person, guid: poll_participation_entity.guid)
expect(poll_participation).not_to be_nil
expect(poll_participation.parent.guid).to eq(poll_participation_entity.parent_guid)
expect(poll_participation.poll_answer.guid).to eq(poll_participation_entity.poll_answer_guid)
comment = Comment.find_by(guid: others_comment_entity.guid)
expect(comment.author.diaspora_handle).to eq(others_comment_entity.author)
expect(comment.parent.author.diaspora_handle).to eq(user.diaspora_handle)
end
end
context "old user is a known remote user" do
let(:old_person) {
FactoryGirl.create(:person,
profile: FactoryGirl.build(:profile),
serialized_public_key: archive_private_key.public_key.export,
diaspora_handle: archive_author)
}
# Some existing data for old_person to test data merge/migration
let!(:existing_contact) { FactoryGirl.create(:contact, person: old_person, sharing: true, receiving: true) }
let!(:existing_subscription) {
FactoryGirl.create(:participation,
author: old_person,
target: FactoryGirl.create(:status_message, guid: existing_subscription_guid))
}
let!(:existing_status_message) {
FactoryGirl.create(:status_message,
author: old_person,
guid: known_status_message_entity.guid).tap {|status_message|
status_message.participants << post_subscriber
}
}
it_behaves_like "imports archive" do
before do
setup_validation_time_expectations
end
end
context "when account migration already exists" do
before do
setup_validation_time_expectations
FactoryGirl.create(:account_migration, old_person: old_person)
end
it "raises exception" do
expect {
MigrationService.new(archive_file.path, new_username).validate
}.to raise_error(MigrationService::MigrationAlreadyExists)
end
end
describe "#only_import?" do
it "returns false" do
service = MigrationService.new(archive_file.path, new_username)
expect(service.only_import?).to be_falsey
end
end
end
context "old user is unknown" do
context "and non-fetchable" do
before do
expect(DiasporaFederation::Discovery::Discovery).to receive(:new).with(archive_author).and_call_original
stub_request(:get, "https://#{old_pod_hostname}/.well-known/webfinger?resource=acct:#{archive_author}")
.to_return(status: 404)
stub_request(:get, %r{https*://#{old_pod_hostname}/\.well-known/host-meta})
.to_return(status: 404)
expect_relayable_parent_fetch(archive_author, existing_subscription_guid)
.and_raise(DiasporaFederation::Federation::Fetcher::NotFetchable)
setup_validation_time_expectations
end
include_examples "imports archive"
end
describe "#only_import?" do
it "returns true" do
service = MigrationService.new(archive_file.path, new_username)
expect(service.only_import?).to be_truthy
end
end
end
end


@ -186,7 +186,8 @@ describe AccountDeleter do
it "has all person association keys accounted for" do
ignored_or_special_ar_person_associations = %i[comments likes poll_participations contacts notification_actors
notifications owner profile pod conversations messages]
notifications owner profile pod conversations messages
account_migration]
all_keys = @account_deletion.normal_ar_person_associates_to_delete + ignored_or_special_ar_person_associations
expect(all_keys.sort_by(&:to_s)).to eq(Person.reflections.keys.sort_by(&:to_s).map(&:to_sym))
end


@ -0,0 +1,66 @@
# frozen_string_literal: true
describe ArchiveImporter::ContactImporter do
let(:target) { FactoryGirl.create(:user) }
let(:contact_importer) { described_class.new(import_object, target) }
describe "#import" do
context "with duplicating data" do
let(:contact) { DataGenerator.new(target).mutual_friend.person.contacts.first }
let(:import_object) {
{
"person_guid" => contact.person.guid,
"account_id" => contact.person.diaspora_handle,
"receiving" => contact.receiving,
"public_key" => contact.person.serialized_public_key,
"person_name" => contact.person.full_name,
"followed" => contact.receiving,
"sharing" => contact.sharing,
"contact_groups_membership" => [
contact.aspects.first.name
],
"following" => contact.sharing
}
}
it "doesn't fail" do
expect {
contact_importer.import
}.not_to raise_error
expect(target.contacts.count).to eq(1)
end
end
context "with correct data" do
let(:aspect) { FactoryGirl.create(:aspect, user: target) }
let(:person) { FactoryGirl.create(:person) }
let(:import_object) {
{
"person_guid" => person.guid,
"account_id" => person.diaspora_handle,
"receiving" => true,
"public_key" => person.serialized_public_key,
"person_name" => person.full_name,
"followed" => true,
"sharing" => true,
"contact_groups_membership" => [
aspect.name
],
"following" => true
}
}
it "imports the contact" do
expect {
contact_importer.import
}.to change(Contact, :count).by(1)
contact = target.contacts.first
expect(contact).not_to be_nil
expect(contact.person).to eq(person)
expect(contact.aspects).to eq([aspect])
end
end
end
end


@ -0,0 +1,101 @@
# frozen_string_literal: true
describe ArchiveImporter::EntityImporter do
let(:instance) { ArchiveImporter::EntityImporter.new(json, nil) }
describe "#import" do
context "with status_message" do
let(:guid) { UUID.generate(:compact) }
let(:json) { JSON.parse(<<~JSON) }
{
"entity_data" : {
"created_at" : "2015-10-19T13:58:16Z",
"guid" : "#{guid}",
"text" : "test post",
"author" : "author@example.com"
},
"entity_type" : "status_message"
}
JSON
context "with known author" do
let!(:author) { FactoryGirl.create(:person, diaspora_handle: "author@example.com") }
it "runs entity receive routine" do
expect(Diaspora::Federation::Receive).to receive(:perform)
.with(kind_of(DiasporaFederation::Entities::StatusMessage))
.and_call_original
instance.import
status_message = StatusMessage.find_by(guid: guid)
expect(status_message).not_to be_nil
expect(status_message.author).to eq(author)
end
end
context "with unknown author" do
it "handles missing person" do
expect {
instance.import
}.not_to raise_error
expect(StatusMessage.find_by(guid: guid)).to be_nil
end
end
end
context "with comment" do
let(:status_message) { FactoryGirl.create(:status_message) }
let(:author) { FactoryGirl.create(:user) }
let(:comment_entity) {
data = Fabricate.attributes_for(:comment_entity,
author: author.diaspora_handle,
parent_guid: status_message.guid)
data[:author_signature] = Fabricate(:comment_entity, data).sign_with_key(author.encryption_key)
Fabricate(:comment_entity, data)
}
let(:guid) { comment_entity.guid }
let(:json) { comment_entity.to_json.as_json }
it "runs entity receive routine" do
expect(Diaspora::Federation::Receive).to receive(:perform)
.with(kind_of(DiasporaFederation::Entities::Comment))
.and_call_original
instance.import
comment = Comment.find_by(guid: guid)
expect(comment).not_to be_nil
expect(comment.author).to eq(author.person)
end
it "rescues DiasporaFederation::Entities::Signable::SignatureVerificationFailed" do
expect(Person).to receive(:find_or_fetch_by_identifier)
.with(author.diaspora_handle)
.and_raise DiasporaFederation::Entities::Signable::SignatureVerificationFailed
expect {
instance.import
}.not_to raise_error
end
it "rescues DiasporaFederation::Discovery::InvalidDocument" do
expect(Person).to receive(:find_or_fetch_by_identifier)
.with(author.diaspora_handle)
.and_raise DiasporaFederation::Discovery::InvalidDocument
expect {
instance.import
}.not_to raise_error
end
it "rescues DiasporaFederation::Discovery::DiscoveryError" do
expect(Person).to receive(:find_or_fetch_by_identifier)
.with(author.diaspora_handle)
.and_raise DiasporaFederation::Discovery::DiscoveryError
expect {
instance.import
}.not_to raise_error
end
end
end
end


@ -0,0 +1,44 @@
# frozen_string_literal: true
shared_examples "own entity importer" do
describe "#import" do
let(:new_user) { FactoryGirl.create(:user) }
let(:instance) { described_class.new(entity_json.as_json, new_user) }
context "with known entity" do
context "with correct author in json" do
let(:entity_json) { known_entity_with_correct_author }
it "doesn't import" do
expect {
instance.import
}.not_to change(entity_class, :count)
end
end
context "with incorrect author in json" do
let(:entity_json) { known_entity_with_incorrect_author }
it "doesn't import" do
expect {
instance.import
}.not_to change(entity_class, :count)
end
end
end
context "with unknown entity" do
let(:guid) { unknown_entity[:entity_data][:guid] }
let(:entity_json) { unknown_entity }
it "imports with author substitution" do
expect {
instance.import
}.to change(entity_class, :count).by(1)
status_message = entity_class.find_by(guid: guid)
expect(status_message.author).to eq(new_user.person)
end
end
end
end


@ -0,0 +1,28 @@
# frozen_string_literal: true
require "lib/archive_importer/own_entity_importer_shared"
describe ArchiveImporter::OwnEntityImporter do
it_behaves_like "own entity importer" do
let(:entity_class) { StatusMessage }
let!(:status_message) { FactoryGirl.create(:status_message) }
let(:entity) { Diaspora::Federation::Entities.build(status_message) }
let(:known_entity_with_correct_author) {
entity.to_json
}
let(:known_entity_with_incorrect_author) {
result = known_entity_with_correct_author
result[:entity_data][:author] = FactoryGirl.create(:person).diaspora_handle
result
}
let(:unknown_entity) {
result = known_entity_with_correct_author
result[:entity_data][:author] = Fabricate.sequence(:diaspora_id)
result[:entity_data][:guid] = UUID.generate(:compact)
result
}
end
end


@ -0,0 +1,31 @@
# frozen_string_literal: true
require "lib/archive_importer/own_entity_importer_shared"
describe ArchiveImporter::OwnRelayableImporter do
it_behaves_like "own entity importer" do
let(:entity_class) { Comment }
let!(:comment) { FactoryGirl.create(:comment, author: FactoryGirl.create(:user).person) }
let(:known_entity_with_correct_author) {
Diaspora::Federation::Entities.build(comment).to_json
}
let(:known_entity_with_incorrect_author) {
Fabricate(
:comment_entity,
author: FactoryGirl.create(:user).diaspora_handle,
guid: comment.guid,
parent_guid: comment.parent.guid
).to_json
}
let(:unknown_entity) {
Fabricate(
:comment_entity,
author: FactoryGirl.create(:user).diaspora_handle,
parent_guid: FactoryGirl.create(:status_message).guid
).to_json
}
end
end


@ -0,0 +1,117 @@
# frozen_string_literal: true
require "lib/archive_importer/own_entity_importer_shared"
describe ArchiveImporter::PostImporter do
describe "#import" do
let(:old_person) { post.author }
let(:new_user) { FactoryGirl.create(:user) }
let(:entity) { Diaspora::Federation::Entities.build(post) }
let(:entity_json) { entity.to_json.as_json }
let(:instance) { described_class.new(entity_json, new_user) }
it_behaves_like "own entity importer" do
let(:entity_class) { StatusMessage }
let!(:post) { FactoryGirl.create(:status_message) }
let(:known_entity_with_correct_author) {
entity.to_json
}
let(:known_entity_with_incorrect_author) {
result = known_entity_with_correct_author
result[:entity_data][:author] = FactoryGirl.create(:person).diaspora_handle
result
}
let(:unknown_entity) {
result = known_entity_with_correct_author
result[:entity_data][:author] = Fabricate.sequence(:diaspora_id)
result[:entity_data][:guid] = UUID.generate(:compact)
result
}
end
context "with subscription" do
let(:post) { FactoryGirl.build(:status_message, public: true) }
let(:subscribed_person) { FactoryGirl.create(:person) }
let(:subscribed_person_id) { subscribed_person.diaspora_handle }
before do
entity_json.deep_merge!("subscribed_users_ids" => [subscribed_person_id])
end
# TODO: rewrite this test when new subscription implementation is there
xit "creates a subscription for the post" do
instance.import
imported_post = Post.find_by(guid: post.guid)
expect(imported_post).not_to be_nil
expect(imported_post.participations.first.author).to eq(subscribed_person)
end
context "when subscribed user's account is closed" do
before do
AccountDeleter.new(subscribed_person).perform!
end
# TODO: rewrite this test when new subscription implementation is there
xit "doesn't create a subscription" do
instance.import
imported_post = Post.find_by(guid: post.guid)
expect(imported_post).not_to be_nil
expect(imported_post.participations).to be_empty
end
end
context "when subscribed user has migrated" do
let(:account_migration) { FactoryGirl.create(:account_migration) }
let(:subscribed_person) { account_migration.old_person }
# TODO: rewrite this test when new subscription implementation is there
xit "creates participation for the new user" do
instance.import
imported_post = Post.find_by(guid: post.guid)
expect(imported_post).not_to be_nil
expect(imported_post.participations.first.author).to eq(account_migration.new_person)
end
end
context "when subscribed user is not fetchable" do
let(:subscribed_person_id) { "old_id@old_pod.nowhere" }
it "doesn't fail" do
stub_request(
:get,
%r{https*://old_pod\.nowhere/\.well-known/webfinger\?resource=acct:old_id@old_pod\.nowhere}
).to_return(status: 404, body: "", headers: {})
stub_request(:get, %r{https*://old_pod\.nowhere/\.well-known/host-meta})
.to_return(status: 404, body: "", headers: {})
expect {
instance.import
}.not_to raise_error
end
end
end
context "with photos" do
let(:photo_entity) { Fabricate(:photo_entity) }
let(:entity) { Fabricate(:status_message_entity, photos: [photo_entity], author: photo_entity.author) }
describe "#import" do
it "substitutes photo author" do
expect {
instance.import
}.not_to raise_error
photo = Photo.find_by(guid: photo_entity.guid)
expect(photo).not_to be_nil
expect(photo.author).to eq(new_user.person)
end
end
end
end
end


@ -0,0 +1,148 @@
# frozen_string_literal: true
require "integration/federation/federation_helper"
describe ArchiveImporter do
describe "#import" do
let(:target) { FactoryGirl.create(:user) }
let(:archive_importer) {
archive_importer = ArchiveImporter.new(archive_hash)
archive_importer.user = target
archive_importer
}
context "with tag following" do
let(:archive_hash) {
{
"user" => {
"profile" => {
"entity_data" => {
"author" => "old_id@old_pod.nowhere"
}
},
"followed_tags" => ["testtag"]
}
}
}
it "imports tag" do
archive_importer.import
expect(target.tag_followings.first.tag.name).to eq("testtag")
end
end
context "with subscription" do
let(:status_message) { FactoryGirl.create(:status_message) }
let(:archive_hash) {
{
"user" => {
"profile" => {
"entity_data" => {
"author" => "old_id@old_pod.nowhere"
}
},
"post_subscriptions" => [status_message.guid]
}
}
}
it "imports tag" do
archive_importer.import
expect(target.participations.first.target).to eq(status_message)
end
end
context "with duplicates" do
let(:archive_hash) {
{
"user" => {
"auto_follow_back_aspect" => "Friends",
"profile" => {
"entity_data" => {
"author" => "old_id@old_pod.nowhere"
}
},
"contact_groups" => [{
"chat_enabled" => true,
"name" => "Friends"
}],
"followed_tags" => [target.tag_followings.first.tag.name],
"post_subscriptions" => [target.participations.first.target.guid]
}
}
}
before do
DataGenerator.create(target, %i[tag_following subscription])
end
it "doesn't fail" do
expect {
archive_importer.import
}.not_to raise_error
end
end
context "with non-fetchable subscription" do
let(:archive_hash) {
{
"user" => {
"profile" => {
"entity_data" => {
"author" => "old_id@old_pod.nowhere"
}
},
"post_subscriptions" => ["XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"]
}
}
}
before do
stub_request(:get, %r{https*://old_pod\.nowhere/\.well-known/webfinger\?resource=acct:old_id@old_pod\.nowhere})
.to_return(status: 404, body: "", headers: {})
stub_request(:get, %r{https*://old_pod\.nowhere/\.well-known/host-meta})
.to_return(status: 404, body: "", headers: {})
end
it "doesn't fail" do
expect {
archive_importer.import
}.not_to raise_error
end
end
end
describe "#create_user" do
let(:archive_importer) { ArchiveImporter.new(archive_hash) }
let(:archive_hash) {
{
"user" => {
"profile" => {
"entity_data" => {
"author" => "old_id@old_pod.nowhere"
}
},
"email" => "user@example.com",
"strip_exif" => false,
"show_community_spotlight_in_stream" => false,
"language" => "ru",
"disable_mail" => false,
"auto_follow_back" => true
}
}
}
it "creates user" do
expect {
archive_importer.create_user(username: "new_name", password: "123456")
}.to change(User, :count).by(1)
expect(archive_importer.user.email).to eq("user@example.com")
expect(archive_importer.user.strip_exif).to eq(false)
expect(archive_importer.user.show_community_spotlight_in_stream).to eq(false)
expect(archive_importer.user.language).to eq("ru")
expect(archive_importer.user.disable_mail).to eq(false)
expect(archive_importer.user.auto_follow_back).to eq(true)
end
end
end


@ -0,0 +1,35 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::AuthorPrivateKeyValidator do
include_context "validators shared context"
context "when private key doesn't match the key in the archive" do
let(:author) { FactoryGirl.create(:person) }
it "contains error" do
expect(validator.messages)
.to include("Private key in the archive doesn't match the known key of #{author_id}")
end
end
context "when private key matches the key in the archive" do
let(:author) { FactoryGirl.create(:person, serialized_public_key: author_pkey.public_key.export) }
include_examples "validation result is valid"
end
context "with non-fetchable author" do
let(:author_id) { "old_id@old_pod.nowhere" }
before do
stub_request(:get, %r{https*://old_pod\.nowhere/\.well-known/webfinger\?resource=acct:old_id@old_pod\.nowhere})
.to_return(status: 404, body: "", headers: {})
stub_request(:get, %r{https*://old_pod\.nowhere/\.well-known/host-meta})
.to_return(status: 404, body: "", headers: {})
end
include_examples "validation result is valid"
end
end


@ -0,0 +1,40 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::CollectionValidator do
include_context "validators shared context"
class TestValidator < ArchiveValidator::BaseValidator
def initialize(_archive_hash, item)
super({})
self.valid = item
messages.push("This element is invalid!") unless item
end
end
class TestCollectionValidator < ArchiveValidator::CollectionValidator
def initialize(collection)
@collection = collection
super({})
end
def entity_validator
TestValidator
end
attr_reader :collection
end
it "validates when all collection elements are validated" do
validator = TestCollectionValidator.new([true, true, true])
expect(validator.collection).to eq([true, true, true])
expect(validator.messages).to be_empty
end
it "removes invalid elements from the collection and add keeps failure messages" do
validator = TestCollectionValidator.new([true, false, true])
expect(validator.collection).to eq([true, true])
expect(validator.messages).to eq(["This element is invalid!"])
end
end


@ -0,0 +1,115 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::ContactValidator do
include_context "validators shared context"
include_context "with known author"
let(:validator) { described_class.new(input_hash, contact) }
before do
include_in_input_archive(
user: {
contacts: [contact]
}
)
end
context "with a correct contact" do
let(:known_id) { FactoryGirl.create(:person).diaspora_handle }
before do
include_in_input_archive(
user: {contact_groups: [{name: "generic"}]}
)
end
let(:contact) {
{
"account_id" => known_id,
"contact_groups_membership" => ["generic"]
}
}
include_examples "validation result is valid"
end
context "when person referenced in contact is unknown" do
let(:unknown_id) { Fabricate.sequence(:diaspora_id) }
let(:contact) {
{
"account_id" => unknown_id
}
}
context "and discovery is successful" do
before do
expect_any_instance_of(DiasporaFederation::Discovery::Discovery).to receive(:fetch_and_save) {
FactoryGirl.create(:person, diaspora_handle: unknown_id)
}
end
include_examples "validation result is valid"
end
context "and discovery fails" do
before do
expect_any_instance_of(DiasporaFederation::Discovery::Discovery)
.to receive(:fetch_and_save).and_raise(
DiasporaFederation::Discovery::DiscoveryError, "discovery error reasons"
)
end
it "is not valid" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to include(
"ArchiveValidator::ContactValidator: failed to fetch person #{unknown_id}: discovery error reasons"
)
end
end
end
context "when person is deleted" do
let(:person) { FactoryGirl.create(:person) }
let(:diaspora_id) { person.diaspora_handle }
let(:contact) {
{
"account_id" => diaspora_id,
"contact_groups_membership" => ["generic"]
}
}
before do
AccountDeleter.new(person).perform!
end
it "is not valid" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to include(
"ArchiveValidator::ContactValidator: account #{diaspora_id} is closed"
)
end
end
context "when person is migrated" do
let(:account_migration) { FactoryGirl.create(:account_migration).tap(&:perform!) }
let(:person) { account_migration.old_person }
let(:diaspora_id) { person.diaspora_handle }
let(:contact) {
{
"account_id" => diaspora_id,
"contact_groups_membership" => ["generic"]
}
}
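# The validator is expected to rewrite account_id to the handle of the account the person migrated to.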
it "is valid and person reference is updated" do
expect(validator.valid?).to be_truthy
expect(contact["account_id"]).to eq(account_migration.new_person.diaspora_handle)
expect(validator.messages).to be_empty
end
end
end

View file

@ -0,0 +1,61 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::ContactsValidator do
include_context "validators shared context"
include_context "with known author"
let(:correct_item) {
person = FactoryGirl.create(:person)
{
"contact_groups_membership" => [],
"person_guid" => person.guid,
"public_key" => person.serialized_public_key,
"followed" => false,
"receiving" => false,
"sharing" => true,
"person_name" => person.name,
"following" => true,
"account_id" => person.diaspora_handle
}
}
let(:correct_archive) {
{
"user" => {
"contacts" => [correct_item]
}
}
}
let(:incorrect_item) {
person = FactoryGirl.create(:person)
person.lock_access!
{
"contact_groups_membership" => [],
"person_guid" => person.guid,
"public_key" => person.serialized_public_key,
"followed" => false,
"receiving" => false,
"sharing" => true,
"person_name" => person.name,
"following" => true,
"account_id" => person.diaspora_handle
}
}
let(:archive_with_error) {
{
"user" => {
"contacts" => [correct_item, incorrect_item]
}
}
}
let(:element_validator_class) {
ArchiveValidator::ContactValidator
}
include_examples "a collection validator"
end

View file

@ -0,0 +1,76 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::OthersRelayablesValidator do
include_context "validators shared context"
include_context "with known author"
let(:parent_guid) { UUID.generate :compact }
before do
include_in_input_archive(
user: {
posts: [
{
entity_type: "status_message",
subscribed_users_ids: [],
entity_data: {
text: "test",
author: author_id,
public: false,
guid: parent_guid
}
}
]
}
)
end
let(:correct_item) {
{
"entity_type" => "like",
"entity_data" => {
"positive" => true,
"parent_type" => "Post",
"author" => "test-1@example.com",
"parent_guid" => parent_guid,
"guid" => UUID.generate(:compact)
}
}
}
let(:correct_archive) {
{
others_data: {
relayables: [correct_item]
}
}
}
let(:incorrect_item) {
{
"entity_type" => "like",
"entity_data" => {
"positive" => true,
"parent_type" => "Post",
"author" => "test-1@example.com",
"parent_guid" => UUID.generate(:compact),
"guid" => UUID.generate(:compact)
}
}
}
let(:archive_with_error) {
{
others_data: {
relayables: [correct_item, incorrect_item]
}
}
}
let(:element_validator_class) {
ArchiveValidator::RelayableValidator
}
include_examples "a collection validator"
end

View file

@ -0,0 +1,159 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::OwnRelayableValidator do
include_context "validators shared context"
include_context "relayable validator context"
let(:relayable_entity) { :comment_entity }
let(:author) { FactoryGirl.create(:user).person }
let(:relayable_author) {
author_id
}
def create_root
FactoryGirl.create(:status_message, guid: parent_guid)
end
before do
relayable["entity_data"].delete("author_signature")
create_root
end
it_behaves_like "a relayable validator"
context "when root is unknown" do
def create_root; end
context "it fetches root" do
before do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(author.diaspora_handle, "Post", parent_guid) {
FactoryGirl.create(:status_message, guid: parent_guid)
}
end
include_examples "validation result is valid"
end
context "when root is in the archive and is an own post" do
before do
include_in_input_archive(
user: {
posts: [
entity_data: {
text: "123456",
created_at: "2017-07-03T08:12:25Z",
photos: [],
author: author_id,
public: false,
guid: parent_guid
},
entity_type: "status_message"
]
}
)
expect(DiasporaFederation::Federation::Fetcher)
.not_to receive(:fetch_public)
end
include_examples "validation result is valid"
end
context "when fetching fails" do
before do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(author.diaspora_handle, "Post", parent_guid)
.and_raise(DiasporaFederation::Federation::Fetcher::NotFetchable)
end
it "is not valid and contains a message" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to include("Parent entity for Comment:#{guid} is missing. "\
"Impossible to import, ignoring.")
end
end
end
context "with a poll participation" do
let(:relayable_entity) { :poll_participation_entity }
context "with known root" do
def create_root
smwp = FactoryGirl.create(:status_message_with_poll)
smwp.poll.update(guid: parent_guid)
end
include_examples "validation result is valid"
end
context "when root in unknown" do
def create_root; end
context "it fetches root" do
before do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(author.diaspora_handle, "Poll", parent_guid) {
FactoryGirl.create(:poll, guid: parent_guid)
}
end
include_examples "validation result is valid"
end
context "when root is in the archive and is an own post" do
before do
include_in_input_archive(
user: {
posts: [
entity_data: {
text: "123456",
created_at: "2017-07-03T08:12:25Z",
photos: [],
author: author_id,
public: false,
guid: "1234567890abcdef",
poll: {
entity_type: "poll",
entity_data: {
guid: parent_guid,
question: "1234567 ?",
poll_answers: []
}
}
},
entity_type: "status_message"
]
}
)
expect(DiasporaFederation::Federation::Fetcher)
.not_to receive(:fetch_public)
end
include_examples "validation result is valid"
end
context "when fetching fails" do
before do
expect(DiasporaFederation::Federation::Fetcher)
.to receive(:fetch_public)
.with(author.diaspora_handle, "Poll", parent_guid)
.and_raise(DiasporaFederation::Federation::Fetcher::NotFetchable)
end
it "is not valid and contains a message" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to include("Parent entity for PollParticipation:#{guid} is missing. "\
"Impossible to import, ignoring.")
end
end
end
end
end

View file

@ -0,0 +1,29 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::PostValidator do
include_context "validators shared context"
include_context "with known author"
let(:guid) { UUID.generate(:compact) }
let(:validator) { described_class.new(input_hash, reshare) }
context "with a reshare with no root" do
let(:reshare) {
{
"entity_data" => {
"guid" => guid,
"author" => author_id,
"created_at" => "2015-01-01T22:37:29Z"
},
"entity_type" => "reshare"
}
}
it "is not valid" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to include("reshare Reshare:#{guid} doesn't have a root, ignored")
end
end
end

View file

@ -0,0 +1,55 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::PostsValidator do
include_context "validators shared context"
include_context "with known author"
let(:correct_item) {
status_message = FactoryGirl.create(:status_message)
{
"entity_data" => {
"guid" => UUID.generate(:compact),
"author" => author_id,
"root_author" => status_message.author.diaspora_handle,
"root_guid" => status_message.guid,
"created_at" => "2015-01-01T22:37:29Z"
},
"entity_type" => "reshare"
}
}
let(:correct_archive) {
{
user: {
posts: [correct_item]
}
}
}
let(:incorrect_item) {
{
"entity_data" => {
"guid" => UUID.generate(:compact),
"author" => author_id,
"created_at" => "2015-01-01T22:37:29Z"
},
"entity_type" => "reshare"
}
}
let(:archive_with_error) {
{
user: {
posts: [correct_item, incorrect_item]
}
}
}
let(:element_validator_class) {
ArchiveValidator::PostValidator
}
include_examples "a collection validator"
end

View file

@ -0,0 +1,96 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::RelayableValidator do
include_context "validators shared context"
include_context "relayable validator context"
let(:author) { FactoryGirl.create(:user).person }
context "with comment" do
let(:relayable_entity) { :comment_entity }
context "when parent is in the archive" do
before do
include_in_input_archive(
user: {
posts: [
{
"entity_type" => "status_message",
"subscribed_users_ids" => [],
"entity_data" => {
"text" => "test",
"author" => "test@example.com",
"public" => false,
"guid" => parent_guid
}
}
]
}
)
end
it_behaves_like "a relayable validator"
end
context "when parent is not in the archive" do
it "is not valid" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to eq(
["Parent entity for Comment:#{guid} is missing. Impossible to import, ignoring."]
)
end
end
end
context "with poll participation" do
let(:relayable_entity) { :poll_participation_entity }
context "when parent is in the archive" do
before do
include_in_input_archive(
user: {
posts: [
{
"entity_type" => "status_message",
"subscribed_users_ids" => [],
"entity_data" => {
"text" => "test",
"author" => "test@example.com",
"public" => false,
"guid" => "abcdef1234567890abcdef1234567890",
"poll" => {
"entity_type" => "poll",
"entity_data" => {
"guid" => parent_guid,
"question" => "question text?",
"poll_answers" => [{
"entity_type" => "poll_answer",
"entity_data" => {
"guid" => "abcdef1234567890abcdef1234567891",
"answer" => "answer text"
}
}]
}
}
}
}
]
}
)
end
it_behaves_like "a relayable validator"
end
context "when parent is not in the archive" do
it "is not valid" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to eq(
["Parent entity for PollParticipation:#{guid} is missing. Impossible to import, ignoring."]
)
end
end
end
end

View file

@ -0,0 +1,63 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::RelayablesValidator do
include_context "validators shared context"
include_context "with known author"
let(:parent_guid) { FactoryGirl.create(:status_message).guid }
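# A parent guid whose remote fetch is stubbed to return 404, so a relayable referencing it cannot find its parent.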
let(:not_found_guid) {
UUID.generate(:compact).tap {|guid|
stub_request(:get, "http://example.net/fetch/post/#{guid}").to_return(status: 404)
}
}
let(:correct_item) {
{
"entity_type" => "like",
"entity_data" => {
"positive" => true,
"parent_type" => "Post",
"author" => "test-1@example.com",
"parent_guid" => parent_guid,
"guid" => UUID.generate(:compact)
}
}
}
let(:correct_archive) {
{
user: {
relayables: [correct_item]
}
}
}
let(:incorrect_item) {
{
"entity_type" => "like",
"entity_data" => {
"positive" => true,
"parent_type" => "Post",
"author" => "test-1@example.com",
"parent_guid" => not_found_guid,
"guid" => UUID.generate(:compact)
}
}
}
let(:archive_with_error) {
{
user: {
relayables: [correct_item, incorrect_item]
}
}
}
let(:element_validator_class) {
ArchiveValidator::OwnRelayableValidator
}
include_examples "a collection validator"
end

View file

@ -0,0 +1,15 @@
# frozen_string_literal: true
require "lib/archive_validator/shared"
describe ArchiveValidator::SchemaValidator do
include_context "validators shared context"
context "when archive doesn't match the schema" do
let(:archive_hash) { {} }
it "contains error" do
expect(validator.messages).to include("Archive schema validation failed")
end
end
end

View file

@ -0,0 +1,117 @@
# frozen_string_literal: true
require "integration/federation/federation_helper"
shared_context "validators shared context" do
let(:author_id) { author.diaspora_handle }
let(:author_pkey) { OpenSSL::PKey::RSA.generate(512) }
let(:archive_hash) { base_archive_hash }
let(:validator) { described_class.new(input_hash) }
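# The archive hash is serialized to JSON and parsed back so validators see string-keyed data, as with a real archive file.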
def input_hash
Yajl::Parser.new.parse(json_file)
end
def json_file
StringIO.new(json_string)
end
def json_string
archive_hash.to_json
end
def base_archive_hash
{
user: {
profile: {
entity_type: "profile",
entity_data: {
author: author_id
}
},
username: "aaaa",
email: "aaaa@aa.com",
private_key: author_pkey.export,
contacts: [], contact_groups: [], posts: [], relayables: [], followed_tags: [], post_subscriptions: []
},
others_data: {relayables: []},
version: "2.0"
}
end
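# Deep-merges additional data into the base archive hash before it is serialized and parsed.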
def include_in_input_archive(hash)
archive_hash.deep_merge!(hash)
end
end
shared_context "with known author" do
let(:author) { FactoryGirl.create(:person) }
end
shared_examples "validation result is valid" do
it "is valid" do
expect(validator.valid?).to be_truthy
expect(validator.messages).to be_empty
end
end
shared_context "relayable validator context" do
let(:validator) { described_class.new(input_hash, relayable) }
let(:relayable_author) {
remote_user_on_pod_b.diaspora_handle
}
let(:relayable_attributes) {
{
author: relayable_author
}
}
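# Converts the fabricated federation entity into the string-keyed entity_type/entity_data hash form used in archives.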
let(:relayable) {
Fabricate(relayable_entity, relayable_attributes).to_json.as_json
}
let(:guid) {
relayable["entity_data"]["guid"]
}
let(:parent_guid) {
relayable["entity_data"]["parent_guid"]
}
end
shared_examples "a relayable validator" do
context "with a correct comment" do
include_examples "validation result is valid"
end
context "when the comment is already known" do
let!(:original_comment) {
FactoryGirl.create(:comment, guid: guid, author: Person.by_account_identifier(relayable_author))
}
include_examples "validation result is valid"
end
end
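# Shared expectations for collection validators: valid elements are kept, invalid ones are dropped and their messages propagated.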
shared_examples "a collection validator" do
context "with correct elements in the collection" do
before do
include_in_input_archive(correct_archive)
end
include_examples "validation result is valid"
end
context "with incorrect elements in the collection" do
before do
include_in_input_archive(archive_with_error)
end
it "filters collection from invalid elements" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to eq(element_validator_class.new(input_hash, incorrect_item).messages)
expect(validator.collection).to eq([correct_item])
end
end
end

View file

@ -0,0 +1,18 @@
# frozen_string_literal: true
describe ArchiveValidator do
let(:json_string) { "{}" }
let(:json_file) { StringIO.new(json_string) }
let(:archive_validator) { ArchiveValidator.new(json_file) }
describe "#validate" do
context "when bad json passed" do
let(:json_string) { "#@)g?$0" }
it "contains critical error" do
archive_validator.validate
expect(archive_validator.errors.first).to include("Bad JSON provided")
end
end
end
end

View file

@ -41,7 +41,7 @@ describe AccountMigration, type: :model do
it "raises when no private key is provided" do
expect {
account_migration.sender
}.to raise_error("can't build sender without old private key defined")
}.to raise_error("can't build sender without old private key and diaspora ID defined")
end
end
@ -146,7 +146,7 @@ describe AccountMigration, type: :model do
expect {
account_migration.perform!
}.to raise_error "can't build sender without old private key defined"
}.to raise_error "can't build sender without old private key and diaspora ID defined"
end
end
@ -226,4 +226,14 @@ describe AccountMigration, type: :model do
end
end
end
describe "#newest_person" do
let!(:second_migration) {
FactoryGirl.create(:account_migration, old_person: account_migration.new_person)
}
it "returns the newest account in the migration chain" do
expect(account_migration.newest_person).to eq(second_migration.new_person)
end
end
end

View file

@ -137,7 +137,7 @@ RSpec.configure do |config|
config.include FactoryGirl::Syntax::Methods
config.include JSON::SchemaMatchers
config.json_schemas[:archive_schema] = "lib/schemas/archive-format.json"
config.json_schemas[:archive_schema] = ArchiveValidator::SchemaValidator::JSON_SCHEMA
JSON::Validator.add_schema(
JSON::Schema.new(