diaspora/spec/lib/archive_validator/shared.rb
cmrd Senya f85f167f50 Implement archive import backend
This implements the archive import feature.

The feature is divided into two main subfeatures: archive validation and archive import.

Archive validation runs a set of validations on the input user archive. It can be
used without actually running the import, e.g. when a user wants to check the
archive from the frontend before importing it. Validators may add messages and
modify the archive.
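
As a rough sketch of the call pattern (the validator class name below is
illustrative; the spec helpers in this file resolve the real one via
described_class), a validator is built from the parsed archive hash and then
queried for its result:

    validator = SomeValidator.new(archive_hash) # illustrative class name
    validator.valid?    # true when the archive passed this validation
    validator.messages  # human-readable problems found by the validator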

Validators are separated into two types: critical and non-critical validators.

If validations by critical validators fail, the archive can't be imported.

If non-critical validations fail, the archive can still be imported, but some
warning messages are rendered.

Validators may also change the archive contents, e.g. when an entity can't be
imported it may be removed from the archive.
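
For collections, the shared examples in this spec file exercise exactly that
behaviour: a collection validator runs a per-element validator over its
collection, collects that validator's messages and keeps only the elements
that passed. Sketched with illustrative names:

    validator = PostsCollectionValidator.new(archive_hash) # illustrative name
    validator.valid?     # false if any element was invalid
    validator.messages   # messages produced by the per-element validator
    validator.collection # only the elements that survived validation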

The validators' job is to take complexity away from the importer and to perform
the validations that are not implemented in other parts of the system, e.g. DB
validations or diaspora_federation entity validations.

The archive importer then takes the modified archive from the validator and imports it.

In order to encapsulate the high-level migration logic, a MigrationService is
introduced. MigrationService links ArchiveValidator, ArchiveImporter and
AccountMigration.
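
Schematically (the wiring and method names below are illustrative, not the
actual MigrationService API), the service validates the archive, imports the
possibly modified result and records the account migration:

    # illustrative wiring of the three collaborators
    validator = ArchiveValidator.new(archive_json)
    if validator.valid?
      ArchiveImporter.new(validator.archive_hash).import
      # ...and an AccountMigration record links the old account to the new one
    end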

A rake task is also introduced which podmins can use to run an archive import.
2019-04-26 18:41:27 +03:00

117 lines
2.8 KiB
Ruby

# frozen_string_literal: true

require "integration/federation/federation_helper"

shared_context "validators shared context" do
  let(:author_id) { author.diaspora_handle }
  let(:author_pkey) { OpenSSL::PKey::RSA.generate(512) }
  let(:archive_hash) { base_archive_hash }
  let(:validator) { described_class.new(input_hash) }

  def input_hash
    Yajl::Parser.new.parse(json_file)
  end

  def json_file
    StringIO.new(json_string)
  end

  def json_string
    archive_hash.to_json
  end

  def base_archive_hash
    {
      user: {
        profile: {
          entity_type: "profile",
          entity_data: {
            author: author_id
          }
        },
        username: "aaaa",
        email: "aaaa@aa.com",
        private_key: author_pkey.export,
        contacts: [],
        contact_groups: [],
        posts: [],
        relayables: [],
        followed_tags: [],
        post_subscriptions: []
      },
      others_data: {relayables: []},
      version: "2.0"
    }
  end

  def include_in_input_archive(hash)
    archive_hash.deep_merge!(hash)
  end
end
shared_context "with known author" do
let(:author) { FactoryGirl.create(:person) }
end
shared_examples "validation result is valid" do
it "is valid" do
expect(validator.valid?).to be_truthy
expect(validator.messages).to be_empty
end
end
shared_context "relayable validator context" do
let(:validator) { described_class.new(input_hash, relayable) }
let(:relayable_author) {
remote_user_on_pod_b.diaspora_handle
}
let(:relayable_attributes) {
{
author: relayable_author
}
}
let(:relayable) {
Fabricate(relayable_entity, relayable_attributes).to_json.as_json
}
let(:guid) {
relayable["entity_data"]["guid"]
}
let(:parent_guid) {
relayable["entity_data"]["parent_guid"]
}
end
shared_examples "a relayable validator" do
context "with a correct comment" do
include_examples "validation result is valid"
end
context "when the comment is already known" do
let!(:original_comment) {
FactoryGirl.create(:comment, guid: guid, author: Person.by_account_identifier(relayable_author))
}
include_examples "validation result is valid"
end
end
shared_examples "a collection validator" do
context "with correct elements in the collection" do
before do
include_in_input_archive(correct_archive)
end
include_examples "validation result is valid"
end
context "with incorrect elements in the collection" do
before do
include_in_input_archive(archive_with_error)
end
it "filters collection from invalid elements" do
expect(validator.valid?).to be_falsey
expect(validator.messages).to eq(element_validator_class.new(input_hash, incorrect_item).messages)
expect(validator.collection).to eq([correct_item])
end
end
end
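
# Illustrative usage only (the validator classes and archive fixtures below are
# hypothetical; see the concrete validator specs for the real ones). A spec for
# a collection validator would pull in the shared pieces roughly like this:
#
#   describe ArchiveValidator::PostsValidator do
#     include_context "validators shared context"
#     include_context "with known author"
#
#     let(:element_validator_class) { ArchiveValidator::PostValidator }
#     let(:correct_item) { ... }   # a valid collection element
#     let(:incorrect_item) { ... } # an element that fails validation
#     let(:correct_archive) { {user: {posts: [correct_item]}} }
#     let(:archive_with_error) { {user: {posts: [incorrect_item]}} }
#
#     it_behaves_like "a collection validator"
#   end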