diff --git a/.github/workflows/review_pipeline.yml b/.github/workflows/review_pipeline.yml
index a1c5e1e2e..887c77673 100644
--- a/.github/workflows/review_pipeline.yml
+++ b/.github/workflows/review_pipeline.yml
@@ -52,6 +52,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
- msg: "Created review app at https://review.submit-social-housing-data.levellingup.gov.uk/${{ github.event.pull_request.number }}"
+ msg: "Created review app at https://review.submit-social-housing-data.communities.gov.uk/${{ github.event.pull_request.number }}"
check_for_duplicate_msg: true
duplicate_msg_pattern: Created review app at*
diff --git a/app/helpers/filters_helper.rb b/app/helpers/filters_helper.rb
index 5f8488bc9..d8a991b87 100644
--- a/app/helpers/filters_helper.rb
+++ b/app/helpers/filters_helper.rb
@@ -146,11 +146,11 @@ module FiltersHelper
end
def collection_year_radio_options
- {
- current_collection_start_year.to_s => { label: year_combo(current_collection_start_year) },
- previous_collection_start_year.to_s => { label: year_combo(previous_collection_start_year) },
- archived_collection_start_year.to_s => { label: year_combo(archived_collection_start_year) },
- }
+ options = {}
+    collection_year_options.each do |year, label|
+ options[year] = { label: }
+ end
+ options
end
def filters_applied_text(filter_type)
diff --git a/app/jobs/data_export_xml_job.rb b/app/jobs/data_export_xml_job.rb
index 8b825f6df..76ce32ec1 100644
--- a/app/jobs/data_export_xml_job.rb
+++ b/app/jobs/data_export_xml_job.rb
@@ -3,8 +3,8 @@ class DataExportXmlJob < ApplicationJob
def perform(full_update: false)
storage_service = Storage::S3Service.new(Configuration::EnvConfigurationService.new, ENV["EXPORT_BUCKET"])
- export_service = Exports::LettingsLogExportService.new(storage_service)
+ export_service = Exports::ExportService.new(storage_service)
- export_service.export_xml_lettings_logs(full_update:)
+ export_service.export_xml(full_update:)
end
end
diff --git a/app/models/export.rb b/app/models/export.rb
new file mode 100644
index 000000000..2ee04fe3d
--- /dev/null
+++ b/app/models/export.rb
@@ -0,0 +1,2 @@
+class Export < ApplicationRecord
+end
diff --git a/app/models/lettings_log.rb b/app/models/lettings_log.rb
index 7bf963212..d70f9cbff 100644
--- a/app/models/lettings_log.rb
+++ b/app/models/lettings_log.rb
@@ -59,17 +59,25 @@ class LettingsLog < Log
query.all
}
scope :search_by, lambda { |param|
- by_id = Arel.sql("CASE WHEN lettings_logs.id = #{param.to_i} THEN 0 ELSE 1 END")
- by_tenant_code = Arel.sql("CASE WHEN tenancycode = '#{param}' THEN 0 WHEN tenancycode ILIKE '%#{param}%' THEN 1 ELSE 2 END")
- by_propcode = Arel.sql("CASE WHEN propcode = '#{param}' THEN 0 WHEN propcode ILIKE '%#{param}%' THEN 1 ELSE 2 END")
- by_postcode = Arel.sql("CASE WHEN REPLACE(postcode_full, ' ', '') = '#{param.delete(' ')}' THEN 0 when REPLACE(postcode_full, ' ', '') ILIKE '%#{param.delete(' ')}%' then 1 ELSE 2 END")
+ sanitized_param = ActiveRecord::Base.sanitize_sql(param)
+ param_without_spaces = sanitized_param.delete(" ")
+
+ by_id = Arel.sql("CASE WHEN lettings_logs.id = ? THEN 0 ELSE 1 END")
+ by_tenant_code = Arel.sql("CASE WHEN tenancycode = ? THEN 0 WHEN tenancycode ILIKE ? THEN 1 ELSE 2 END")
+ by_propcode = Arel.sql("CASE WHEN propcode = ? THEN 0 WHEN propcode ILIKE ? THEN 1 ELSE 2 END")
+ by_postcode = Arel.sql("CASE WHEN REPLACE(postcode_full, ' ', '') = ? THEN 0 WHEN REPLACE(postcode_full, ' ', '') ILIKE ? THEN 1 ELSE 2 END")
filter_by_location_postcode(param)
.or(filter_by_tenant_code(param))
.or(filter_by_propcode(param))
.or(filter_by_postcode(param))
.or(filter_by_id(param.gsub(/log/i, "")))
- .order(by_id, by_tenant_code, by_propcode, by_postcode)
+ .order(
+ [by_id, sanitized_param.to_i],
+        [by_tenant_code, sanitized_param, "%#{sanitized_param}%"],
+        [by_propcode, sanitized_param, "%#{sanitized_param}%"],
+        [by_postcode, param_without_spaces, "%#{param_without_spaces}%"],
+ )
}
scope :after_date, ->(date) { where("lettings_logs.startdate >= ?", date) }
scope :before_date, ->(date) { where("lettings_logs.startdate < ?", date) }
diff --git a/app/models/logs_export.rb b/app/models/logs_export.rb
deleted file mode 100644
index 0ae2e4179..000000000
--- a/app/models/logs_export.rb
+++ /dev/null
@@ -1,2 +0,0 @@
-class LogsExport < ApplicationRecord
-end
diff --git a/app/models/sales_log.rb b/app/models/sales_log.rb
index 8312b8bff..aca80ef94 100644
--- a/app/models/sales_log.rb
+++ b/app/models/sales_log.rb
@@ -46,14 +46,19 @@ class SalesLog < Log
}
scope :filter_by_purchaser_code, ->(purchid) { where("purchid ILIKE ?", "%#{purchid}%") }
scope :search_by, lambda { |param|
- by_id = Arel.sql("CASE WHEN id = #{param.to_i} THEN 0 ELSE 1 END")
- by_purchaser_code = Arel.sql("CASE WHEN purchid = '#{param}' THEN 0 WHEN purchid ILIKE '%#{param}%' THEN 1 ELSE 2 END")
- by_postcode = Arel.sql("CASE WHEN REPLACE(postcode_full, ' ', '') = '#{param.delete(' ')}' THEN 0 WHEN REPLACE(postcode_full, ' ', '') ILIKE '%#{param.delete(' ')}%' THEN 1 ELSE 2 END")
+ sanitized_param = ActiveRecord::Base.sanitize_sql(param)
+ param_without_spaces = sanitized_param.delete(" ")
+
+ by_id = Arel.sql("CASE WHEN id = ? THEN 0 ELSE 1 END")
+ by_purchaser_code = Arel.sql("CASE WHEN purchid = ? THEN 0 WHEN purchid ILIKE ? THEN 1 ELSE 2 END")
+ by_postcode = Arel.sql("CASE WHEN REPLACE(postcode_full, ' ', '') = ? THEN 0 WHEN REPLACE(postcode_full, ' ', '') ILIKE ? THEN 1 ELSE 2 END")
filter_by_purchaser_code(param)
.or(filter_by_postcode(param))
.or(filter_by_id(param.gsub(/log/i, "")))
- .order(by_id, by_purchaser_code, by_postcode)
+ .order([by_id, sanitized_param.to_i],
+             [by_purchaser_code, sanitized_param, "%#{sanitized_param}%"],
+             [by_postcode, param_without_spaces, "%#{param_without_spaces}%"])
}
scope :age1_answered, -> { where.not(age1: nil).or(where(age1_known: [1, 2])) }
scope :duplicate_logs, lambda { |log|
diff --git a/app/models/user.rb b/app/models/user.rb
index 0a26a254b..aa9b5a507 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -207,6 +207,10 @@ class User < ApplicationRecord
end
def assignable_roles
+    if Rails.env.staging? && Rails.application.credentials[:staging_role_update_email_allowlist]&.include?(email.split("@").last.downcase)
+ return ROLES
+ end
+
return {} unless data_coordinator? || support?
return ROLES if support?
diff --git a/app/services/bulk_upload/lettings/validator.rb b/app/services/bulk_upload/lettings/validator.rb
index 8c1d33fe0..4da5b2c40 100644
--- a/app/services/bulk_upload/lettings/validator.rb
+++ b/app/services/bulk_upload/lettings/validator.rb
@@ -42,14 +42,25 @@ class BulkUpload::Lettings::Validator
def create_logs?
return false if any_setup_errors?
- return false if row_parsers.any?(&:block_log_creation?)
- return false if any_logs_already_exist? && FeatureToggle.bulk_upload_duplicate_log_check_enabled?
+
+ if row_parsers.any?(&:block_log_creation?)
+ Sentry.capture_message("Bulk upload log creation blocked: #{bulk_upload.id}.")
+ return false
+ end
+
+ if any_logs_already_exist? && FeatureToggle.bulk_upload_duplicate_log_check_enabled?
+ Sentry.capture_message("Bulk upload log creation blocked due to duplicate logs: #{bulk_upload.id}.")
+ return false
+ end
row_parsers.each do |row_parser|
row_parser.log.blank_invalid_non_setup_fields!
end
- return false if any_logs_invalid?
+ if any_logs_invalid?
+ Sentry.capture_message("Bulk upload log creation blocked due to invalid logs after blanking non setup fields: #{bulk_upload.id}.")
+ return false
+ end
true
end
diff --git a/app/services/bulk_upload/sales/validator.rb b/app/services/bulk_upload/sales/validator.rb
index 32e9f7533..777424349 100644
--- a/app/services/bulk_upload/sales/validator.rb
+++ b/app/services/bulk_upload/sales/validator.rb
@@ -41,14 +41,25 @@ class BulkUpload::Sales::Validator
def create_logs?
return false if any_setup_errors?
- return false if row_parsers.any?(&:block_log_creation?)
- return false if any_logs_already_exist? && FeatureToggle.bulk_upload_duplicate_log_check_enabled?
+
+ if row_parsers.any?(&:block_log_creation?)
+      Sentry.capture_message("Bulk upload log creation blocked: #{bulk_upload.id}.")
+ return false
+ end
+
+ if any_logs_already_exist? && FeatureToggle.bulk_upload_duplicate_log_check_enabled?
+      Sentry.capture_message("Bulk upload log creation blocked due to duplicate logs: #{bulk_upload.id}.")
+ return false
+ end
row_parsers.each do |row_parser|
row_parser.log.blank_invalid_non_setup_fields!
end
- return false if any_logs_invalid?
+ if any_logs_invalid?
+      Sentry.capture_message("Bulk upload log creation blocked due to invalid logs after blanking non setup fields: #{bulk_upload.id}.")
+ return false
+ end
true
end
diff --git a/app/services/exports/export_service.rb b/app/services/exports/export_service.rb
new file mode 100644
index 000000000..bf98a4edf
--- /dev/null
+++ b/app/services/exports/export_service.rb
@@ -0,0 +1,78 @@
+module Exports
+ class ExportService
+ include CollectionTimeHelper
+
+ def initialize(storage_service, logger = Rails.logger)
+ @storage_service = storage_service
+ @logger = logger
+ end
+
+ def export_xml(full_update: false, collection: nil)
+ start_time = Time.zone.now
+ daily_run_number = get_daily_run_number
+ lettings_archives_for_manifest = {}
+ users_archives_for_manifest = {}
+ organisations_archives_for_manifest = {}
+
+ if collection.present?
+ case collection
+ when "users"
+ users_archives_for_manifest = get_user_archives(start_time, full_update)
+ when "organisations"
+ organisations_archives_for_manifest = get_organisation_archives(start_time, full_update)
+ else
+ lettings_archives_for_manifest = get_lettings_archives(start_time, full_update, collection)
+ end
+ else
+ users_archives_for_manifest = get_user_archives(start_time, full_update)
+ organisations_archives_for_manifest = get_organisation_archives(start_time, full_update)
+ lettings_archives_for_manifest = get_lettings_archives(start_time, full_update, collection)
+ end
+
+ write_master_manifest(daily_run_number, lettings_archives_for_manifest.merge(users_archives_for_manifest).merge(organisations_archives_for_manifest))
+ end
+
+ private
+
+ def get_daily_run_number
+ today = Time.zone.today
+ Export.where(created_at: today.beginning_of_day..today.end_of_day).select(:started_at).distinct.count + 1
+ end
+
+ def write_master_manifest(daily_run, archive_datetimes)
+ today = Time.zone.today
+ increment_number = daily_run.to_s.rjust(4, "0")
+ month = today.month.to_s.rjust(2, "0")
+ day = today.day.to_s.rjust(2, "0")
+ file_path = "Manifest_#{today.year}_#{month}_#{day}_#{increment_number}.csv"
+ string_io = build_manifest_csv_io(archive_datetimes)
+ @storage_service.write_file(file_path, string_io)
+ end
+
+ def build_manifest_csv_io(archive_datetimes)
+ headers = ["zip-name", "date-time zipped folder generated", "zip-file-uri"]
+ csv_string = CSV.generate do |csv|
+ csv << headers
+ archive_datetimes.each do |(archive, datetime)|
+ csv << [archive, datetime, "#{archive}.zip"]
+ end
+ end
+ StringIO.new(csv_string)
+ end
+
+ def get_user_archives(start_time, full_update)
+ users_export_service = Exports::UserExportService.new(@storage_service, start_time)
+ users_export_service.export_xml_users(full_update:)
+ end
+
+ def get_organisation_archives(start_time, full_update)
+ organisations_export_service = Exports::OrganisationExportService.new(@storage_service, start_time)
+ organisations_export_service.export_xml_organisations(full_update:)
+ end
+
+ def get_lettings_archives(start_time, full_update, collection)
+ lettings_export_service = Exports::LettingsLogExportService.new(@storage_service, start_time)
+ lettings_export_service.export_xml_lettings_logs(full_update:, collection_year: collection)
+ end
+ end
+end
diff --git a/app/services/exports/lettings_log_export_service.rb b/app/services/exports/lettings_log_export_service.rb
index 0c3b6eec4..b21099a06 100644
--- a/app/services/exports/lettings_log_export_service.rb
+++ b/app/services/exports/lettings_log_export_service.rb
@@ -1,22 +1,15 @@
module Exports
- class LettingsLogExportService
+ class LettingsLogExportService < Exports::XmlExportService
include Exports::LettingsLogExportConstants
include CollectionTimeHelper
- def initialize(storage_service, logger = Rails.logger)
- @storage_service = storage_service
- @logger = logger
- end
-
def export_xml_lettings_logs(full_update: false, collection_year: nil)
- start_time = Time.zone.now
- daily_run_number = get_daily_run_number
archives_for_manifest = {}
- recent_export = LogsExport.order("started_at").last
collection_years_to_export(collection_year).each do |collection|
- base_number = LogsExport.where(empty_export: false, collection:).maximum(:base_number) || 1
- export = build_export_run(collection, start_time, base_number, full_update)
- archives = write_export_archive(export, collection, start_time, recent_export, full_update)
+ recent_export = Export.where(collection:).order("started_at").last
+ base_number = Export.where(empty_export: false, collection:).maximum(:base_number) || 1
+ export = build_export_run(collection, base_number, full_update)
+ archives = write_export_archive(export, collection, recent_export, full_update)
archives_for_manifest.merge!(archives)
@@ -24,46 +17,11 @@ module Exports
export.save!
end
- write_master_manifest(daily_run_number, archives_for_manifest)
+ archives_for_manifest
end
private
- def get_daily_run_number
- today = Time.zone.today
- LogsExport.where(created_at: today.beginning_of_day..today.end_of_day).select(:started_at).distinct.count + 1
- end
-
- def build_export_run(collection, current_time, base_number, full_update)
- @logger.info("Building export run for #{collection}")
- previous_exports_with_data = LogsExport.where(collection:, empty_export: false)
-
- increment_number = previous_exports_with_data.where(base_number:).maximum(:increment_number) || 1
-
- if full_update
- base_number += 1 if LogsExport.any? # Only increment when it's not the first run
- increment_number = 1
- else
- increment_number += 1
- end
-
- if previous_exports_with_data.empty?
- return LogsExport.new(collection:, base_number:, started_at: current_time)
- end
-
- LogsExport.new(collection:, started_at: current_time, base_number:, increment_number:)
- end
-
- def write_master_manifest(daily_run, archive_datetimes)
- today = Time.zone.today
- increment_number = daily_run.to_s.rjust(4, "0")
- month = today.month.to_s.rjust(2, "0")
- day = today.day.to_s.rjust(2, "0")
- file_path = "Manifest_#{today.year}_#{month}_#{day}_#{increment_number}.csv"
- string_io = build_manifest_csv_io(archive_datetimes)
- @storage_service.write_file(file_path, string_io)
- end
-
def get_archive_name(collection, base_number, increment)
return unless collection
@@ -72,88 +30,14 @@ module Exports
"core_#{collection}_#{collection + 1}_apr_mar_#{base_number_str}_#{increment_str}".downcase
end
- def write_export_archive(export, collection, start_time, recent_export, full_update)
- archive = get_archive_name(collection, export.base_number, export.increment_number) # archive name would be the same for all logs because they're already filtered by year (?)
-
- initial_logs_count = retrieve_lettings_logs(start_time, recent_export, full_update).filter_by_year(collection).count
- @logger.info("Creating #{archive} - #{initial_logs_count} logs")
- return {} if initial_logs_count.zero?
-
- zip_file = Zip::File.open_buffer(StringIO.new)
-
- part_number = 1
- last_processed_marker = nil
- logs_count_after_export = 0
-
- loop do
- lettings_logs_slice = if last_processed_marker.present?
- retrieve_lettings_logs(start_time, recent_export, full_update).filter_by_year(collection)
- .where("created_at > ?", last_processed_marker)
- .order(:created_at)
- .limit(MAX_XML_RECORDS).to_a
- else
- retrieve_lettings_logs(start_time, recent_export, full_update).filter_by_year(collection)
- .order(:created_at)
- .limit(MAX_XML_RECORDS).to_a
- end
-
- break if lettings_logs_slice.empty?
-
- data_xml = build_export_xml(lettings_logs_slice)
- part_number_str = "pt#{part_number.to_s.rjust(3, '0')}"
- zip_file.add("#{archive}_#{part_number_str}.xml", data_xml)
- part_number += 1
- last_processed_marker = lettings_logs_slice.last.created_at
- logs_count_after_export += lettings_logs_slice.count
- @logger.info("Added #{archive}_#{part_number_str}.xml")
- end
-
- manifest_xml = build_manifest_xml(logs_count_after_export)
- zip_file.add("manifest.xml", manifest_xml)
-
- # Required by S3 to avoid Aws::S3::Errors::BadDigest
- zip_io = zip_file.write_buffer
- zip_io.rewind
- @logger.info("Writing #{archive}.zip")
- @storage_service.write_file("#{archive}.zip", zip_io)
- { archive => Time.zone.now }
- end
-
- def retrieve_lettings_logs(start_time, recent_export, full_update)
+ def retrieve_resources(recent_export, full_update, collection)
if !full_update && recent_export
- params = { from: recent_export.started_at, to: start_time }
- LettingsLog.exportable.where("(updated_at >= :from AND updated_at <= :to) OR (values_updated_at IS NOT NULL AND values_updated_at >= :from AND values_updated_at <= :to)", params)
+ params = { from: recent_export.started_at, to: @start_time }
+ LettingsLog.exportable.where("(updated_at >= :from AND updated_at <= :to) OR (values_updated_at IS NOT NULL AND values_updated_at >= :from AND values_updated_at <= :to)", params).filter_by_year(collection)
else
- params = { to: start_time }
- LettingsLog.exportable.where("updated_at <= :to", params)
- end
- end
-
- def build_manifest_csv_io(archive_datetimes)
- headers = ["zip-name", "date-time zipped folder generated", "zip-file-uri"]
- csv_string = CSV.generate do |csv|
- csv << headers
- archive_datetimes.each do |(archive, datetime)|
- csv << [archive, datetime, "#{archive}.zip"]
- end
+ params = { to: @start_time }
+ LettingsLog.exportable.where("updated_at <= :to", params).filter_by_year(collection)
end
- StringIO.new(csv_string)
- end
-
- def xml_doc_to_temp_file(xml_doc)
- file = Tempfile.new
- xml_doc.write_xml_to(file, encoding: "UTF-8")
- file.rewind
- file
- end
-
- def build_manifest_xml(record_number)
- doc = Nokogiri::XML("")
- doc.at("report") << doc.create_element("form-data-summary")
- doc.at("form-data-summary") << doc.create_element("records")
- doc.at("records") << doc.create_element("count-of-records", record_number)
-
- xml_doc_to_temp_file(doc)
end
def apply_cds_transformation(lettings_log, export_mode)
diff --git a/app/services/exports/organisation_export_constants.rb b/app/services/exports/organisation_export_constants.rb
new file mode 100644
index 000000000..3a1c5fb48
--- /dev/null
+++ b/app/services/exports/organisation_export_constants.rb
@@ -0,0 +1,27 @@
+module Exports::OrganisationExportConstants
+ MAX_XML_RECORDS = 10_000
+
+ EXPORT_FIELDS = Set[
+ "id",
+ "name",
+ "phone",
+ "provider_type",
+ "address_line1",
+ "address_line2",
+ "postcode",
+ "holds_own_stock",
+ "housing_registration_no",
+ "active",
+ "old_org_id",
+ "old_visible_id",
+ "merge_date",
+ "absorbing_organisation_id",
+ "available_from",
+ "deleted_at",
+ "dsa_signed",
+ "dsa_signed_at",
+ "dpo_email",
+ "profit_status",
+ "group"
+ ]
+end
diff --git a/app/services/exports/organisation_export_service.rb b/app/services/exports/organisation_export_service.rb
new file mode 100644
index 000000000..71eccf60a
--- /dev/null
+++ b/app/services/exports/organisation_export_service.rb
@@ -0,0 +1,72 @@
+module Exports
+ class OrganisationExportService < Exports::XmlExportService
+ include Exports::OrganisationExportConstants
+ include CollectionTimeHelper
+
+ def export_xml_organisations(full_update: false)
+ collection = "organisations"
+ recent_export = Export.where(collection:).order("started_at").last
+
+ base_number = Export.where(empty_export: false, collection:).maximum(:base_number) || 1
+ export = build_export_run(collection, base_number, full_update)
+ archives_for_manifest = write_export_archive(export, collection, recent_export, full_update)
+
+ export.empty_export = archives_for_manifest.empty?
+ export.save!
+
+ archives_for_manifest
+ end
+
+ private
+
+ def get_archive_name(collection, base_number, increment)
+ return unless collection
+
+ base_number_str = "f#{base_number.to_s.rjust(4, '0')}"
+ increment_str = "inc#{increment.to_s.rjust(4, '0')}"
+ "#{collection}_2024_2025_apr_mar_#{base_number_str}_#{increment_str}".downcase
+ end
+
+ def retrieve_resources(recent_export, full_update, _collection)
+ if !full_update && recent_export
+ params = { from: recent_export.started_at, to: @start_time }
+ Organisation.where("(updated_at >= :from AND updated_at <= :to)", params)
+ else
+ params = { to: @start_time }
+ Organisation.where("updated_at <= :to", params)
+ end
+ end
+
+ def build_export_xml(organisations)
+ doc = Nokogiri::XML("")
+
+ organisations.each do |organisation|
+ attribute_hash = apply_cds_transformation(organisation)
+ form = doc.create_element("form")
+ doc.at("forms") << form
+ attribute_hash.each do |key, value|
+ if !EXPORT_FIELDS.include?(key)
+ next
+ else
+ form << doc.create_element(key, value)
+ end
+ end
+ end
+
+ xml_doc_to_temp_file(doc)
+ end
+
+ def apply_cds_transformation(organisation)
+ attribute_hash = organisation.attributes
+ attribute_hash["deleted_at"] = organisation.discarded_at
+ attribute_hash["dsa_signed"] = organisation.data_protection_confirmed?
+ attribute_hash["dsa_signed_at"] = organisation.data_protection_confirmation&.signed_at
+ attribute_hash["dpo_email"] = organisation.data_protection_confirmation&.data_protection_officer_email
+ attribute_hash["provider_type"] = organisation.provider_type_before_type_cast
+ attribute_hash["profit_status"] = nil # will need update when we add the field to the org
+ attribute_hash["group"] = nil # will need update when we add the field to the org
+
+ attribute_hash
+ end
+ end
+end
diff --git a/app/services/exports/user_export_constants.rb b/app/services/exports/user_export_constants.rb
new file mode 100644
index 000000000..9ce5840d9
--- /dev/null
+++ b/app/services/exports/user_export_constants.rb
@@ -0,0 +1,18 @@
+module Exports::UserExportConstants
+ MAX_XML_RECORDS = 10_000
+
+ EXPORT_FIELDS = Set[
+ "id",
+ "email",
+ "name",
+ "phone",
+ "organisation_id",
+ "organisation_name",
+ "role",
+ "is_dpo",
+ "is_key_contact",
+ "active",
+ "sign_in_count",
+ "last_sign_in_at",
+ ]
+end
diff --git a/app/services/exports/user_export_service.rb b/app/services/exports/user_export_service.rb
new file mode 100644
index 000000000..907a1cc86
--- /dev/null
+++ b/app/services/exports/user_export_service.rb
@@ -0,0 +1,68 @@
+module Exports
+ class UserExportService < Exports::XmlExportService
+ include Exports::UserExportConstants
+ include CollectionTimeHelper
+
+ def export_xml_users(full_update: false)
+ collection = "users"
+ recent_export = Export.where(collection:).order("started_at").last
+
+ base_number = Export.where(empty_export: false, collection:).maximum(:base_number) || 1
+ export = build_export_run(collection, base_number, full_update)
+ archives_for_manifest = write_export_archive(export, collection, recent_export, full_update)
+
+ export.empty_export = archives_for_manifest.empty?
+ export.save!
+
+ archives_for_manifest
+ end
+
+ private
+
+ def get_archive_name(collection, base_number, increment)
+ return unless collection
+
+ base_number_str = "f#{base_number.to_s.rjust(4, '0')}"
+ increment_str = "inc#{increment.to_s.rjust(4, '0')}"
+ "#{collection}_2024_2025_apr_mar_#{base_number_str}_#{increment_str}".downcase
+ end
+
+ def retrieve_resources(recent_export, full_update, _collection)
+ if !full_update && recent_export
+ params = { from: recent_export.started_at, to: @start_time }
+ User.where("(updated_at >= :from AND updated_at <= :to)", params)
+ else
+ params = { to: @start_time }
+ User.where("updated_at <= :to", params)
+ end
+ end
+
+ def build_export_xml(users)
+ doc = Nokogiri::XML("")
+
+ users.each do |user|
+ attribute_hash = apply_cds_transformation(user)
+ form = doc.create_element("form")
+ doc.at("forms") << form
+ attribute_hash.each do |key, value|
+ if !EXPORT_FIELDS.include?(key)
+ next
+ else
+ form << doc.create_element(key, value)
+ end
+ end
+ end
+
+ xml_doc_to_temp_file(doc)
+ end
+
+ def apply_cds_transformation(user)
+ attribute_hash = user.attributes_before_type_cast
+ attribute_hash["role"] = user.role
+ attribute_hash["organisation_name"] = user.organisation.name
+ attribute_hash["active"] = user.active?
+ attribute_hash["phone"] = [user.phone, user.phone_extension].compact.join(" ")
+ attribute_hash
+ end
+ end
+end
diff --git a/app/services/exports/xml_export_service.rb b/app/services/exports/xml_export_service.rb
new file mode 100644
index 000000000..009a1b306
--- /dev/null
+++ b/app/services/exports/xml_export_service.rb
@@ -0,0 +1,97 @@
+module Exports
+ class XmlExportService
+ include Exports::LettingsLogExportConstants
+ include CollectionTimeHelper
+
+ def initialize(storage_service, start_time, logger = Rails.logger)
+ @storage_service = storage_service
+ @logger = logger
+ @start_time = start_time
+ end
+
+ private
+
+ def build_export_run(collection, base_number, full_update)
+ @logger.info("Building export run for #{collection}")
+ previous_exports_with_data = Export.where(collection:, empty_export: false)
+
+ increment_number = previous_exports_with_data.where(base_number:).maximum(:increment_number) || 1
+
+ if full_update
+ base_number += 1 if Export.any? # Only increment when it's not the first run
+ increment_number = 1
+ else
+ increment_number += 1
+ end
+
+ if previous_exports_with_data.empty?
+ return Export.new(collection:, base_number:, started_at: @start_time)
+ end
+
+ Export.new(collection:, started_at: @start_time, base_number:, increment_number:)
+ end
+
+ def write_export_archive(export, collection, recent_export, full_update)
+ archive = get_archive_name(collection, export.base_number, export.increment_number) # archive name would be the same for all logs because they're already filtered by year (?)
+
+ initial_count = retrieve_resources(recent_export, full_update, collection).count
+ @logger.info("Creating #{archive} - #{initial_count} resources")
+ return {} if initial_count.zero?
+
+ zip_file = Zip::File.open_buffer(StringIO.new)
+
+ part_number = 1
+ last_processed_marker = nil
+ count_after_export = 0
+
+ loop do
+ slice = if last_processed_marker.present?
+ retrieve_resources(recent_export, full_update, collection)
+ .where("created_at > ?", last_processed_marker)
+ .order(:created_at)
+ .limit(MAX_XML_RECORDS).to_a
+ else
+ retrieve_resources(recent_export, full_update, collection)
+ .order(:created_at)
+ .limit(MAX_XML_RECORDS).to_a
+ end
+
+ break if slice.empty?
+
+ data_xml = build_export_xml(slice)
+ part_number_str = "pt#{part_number.to_s.rjust(3, '0')}"
+ zip_file.add("#{archive}_#{part_number_str}.xml", data_xml)
+ part_number += 1
+ last_processed_marker = slice.last.created_at
+ count_after_export += slice.count
+ @logger.info("Added #{archive}_#{part_number_str}.xml")
+ end
+
+ manifest_xml = build_manifest_xml(count_after_export)
+ zip_file.add("manifest.xml", manifest_xml)
+
+ # Required by S3 to avoid Aws::S3::Errors::BadDigest
+ zip_io = zip_file.write_buffer
+ zip_io.rewind
+ @logger.info("Writing #{archive}.zip")
+ @storage_service.write_file("#{archive}.zip", zip_io)
+ { archive => Time.zone.now }
+ end
+
+ def xml_doc_to_temp_file(xml_doc)
+ file = Tempfile.new
+ xml_doc.write_xml_to(file, encoding: "UTF-8")
+ file.rewind
+ file
+ end
+
+ def build_manifest_xml(record_number)
+ doc = Nokogiri::XML("")
+ doc.at("report") << doc.create_element("form-data-summary")
+ doc.at("form-data-summary") << doc.create_element("records")
+ doc.at("records") << doc.create_element("count-of-records", record_number)
+
+ xml_doc_to_temp_file(doc)
+ end
+ end
+end
diff --git a/config/credentials.yml.enc b/config/credentials.yml.enc
index c9d564782..9cd4bba71 100644
--- a/config/credentials.yml.enc
+++ b/config/credentials.yml.enc
@@ -1 +1 @@
-EZNV2LiNWzf52erbQ41Dz3Bh+2f3Uih8liEyhXp5XzHCLzAbmN6/IJqr7b9cTZiCiroFo4n/dFoG3yYrospp3frKsDXxF1K2/MTCJWjpgnn7wc+HiPQWG0W3HRtQCNkyyrHes0YKcYyDWIP6kztYv1I/Me3p0pGEx6t3CpSTg1v46eRnOlDWiUz3rVxPauwq9IYZ75gmnThqvg/Z8wcYsWLx0arago0SXtRPASCNj4uO/lbqTcAfyIXOTSiOlcAIjoPFRSQY7UqY0o2p8jRR/1L16SmGDsk8ijm+UygNmMexa3Khy5WcKctpQICakHs4NRjHNflqgXpXKL9dVBmNc9d7h+gbhbGJQ53Y0d+a35UbhPRMiv4SRH98FwB+WEsLCDdGSHvdmM6ArfOLljTrqrsmSRf0JfUrvzyVYmMCxjv4xgJwUS/TD5lQD1yPwkp2ss00kQJqzNmB7qwFhA8a3e2iNzV8qtAV/Nj+tMlr99Hb7vZZs98/38G2p5RAsE/5Xl9taKhc/ACnVc/bwJND4JWaBB7duCa08xVB8nkjlt5cCwMurzAcy1ZT+e8JepR+g6s8fpScMEWVJXE0hd8=--rZ41rY9TMXmiBUJw--QiLRVNVXZzTW446s7cec1g==
\ No newline at end of file
+QGn9IiI91BaO4IGAtfy92FrNP46X9T2jJErRv+o/PRG9LrimEGeuOE+FwhArKZQ5cTipaDqo8u9Ajv45Kitv3c0GynOOvz0r3OjPRHO/p4hW8BFWQDv581cWWPsyZT2JO51zZ5LnwNFvWrjEB2q49YESgtfADPkJWmtx/By5Cg2/PVIRxvhGKOnheme5cih050wqg/43BdiF0PD9FDTZXJDLJg/QQ8nQYkvQe2jN4nM4mTVpkQkmzDKgGknmUWFfW3qWFzlsdMkdkPdeP9wLnJVbFTeyaaJT3wv6l19d2rKqo8iVvacdaQjRev+LVXqOsNAjVHwcPNQVq9s8pxG24HLk3aQ14Eyjf6tHAuZAV4jLnNqQtBQ0AIldWeOl6SKmlTom1P1tcLp9KpajEADplmWSwUktIGmaakFjk/ApYaUBiYTku2iLHMrT/xSc3jPj5W/ZggeJ0Ij6nuGYE1cmBxWGxda9PzOrDP8coEK9vPHiNeDDM1RoukVmf8gwDmshILi5EwIAsO2gJXM1wtPYMu41+H4/y3c0GIwgfv9QP11q+nqhG1MMcOrAUKGhypAS+M+uLwfGQudfQDKP9Zv3VCnOk3mkKlpIzMMD4UdJxQeE/8sfwIsEhWggEo3oa93ptbRdvJ7YYcVvmMmkVBxk0KWFprl4i/BkFHLWrKNl5LBOGA==--ziMOTnYBB5TDyXYU--3FJMs8e6R8lheqcqB8p8uQ==
\ No newline at end of file
diff --git a/db/migrate/20240802093255_rename_export_table.rb b/db/migrate/20240802093255_rename_export_table.rb
new file mode 100644
index 000000000..68c14d0a9
--- /dev/null
+++ b/db/migrate/20240802093255_rename_export_table.rb
@@ -0,0 +1,5 @@
+class RenameExportTable < ActiveRecord::Migration[7.0]
+ def change
+ rename_table :logs_exports, :exports
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 4d6f18b84..ff1f913df 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -78,6 +78,15 @@ ActiveRecord::Schema[7.0].define(version: 2024_09_23_145326) do
t.index ["organisation_id"], name: "index_data_protection_confirmations_on_organisation_id"
end
+ create_table "exports", force: :cascade do |t|
+ t.datetime "created_at", default: -> { "CURRENT_TIMESTAMP" }
+ t.datetime "started_at", null: false
+ t.integer "base_number", default: 1, null: false
+ t.integer "increment_number", default: 1, null: false
+ t.boolean "empty_export", default: false, null: false
+ t.string "collection"
+ end
+
create_table "la_rent_ranges", force: :cascade do |t|
t.integer "ranges_rent_id"
t.integer "lettype"
@@ -414,15 +423,6 @@ ActiveRecord::Schema[7.0].define(version: 2024_09_23_145326) do
t.boolean "checked"
end
- create_table "logs_exports", force: :cascade do |t|
- t.datetime "created_at", default: -> { "CURRENT_TIMESTAMP" }
- t.datetime "started_at", null: false
- t.integer "base_number", default: 1, null: false
- t.integer "increment_number", default: 1, null: false
- t.boolean "empty_export", default: false, null: false
- t.string "collection"
- end
-
create_table "merge_request_organisations", force: :cascade do |t|
t.integer "merge_request_id"
t.integer "merging_organisation_id"
diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock
index 2a406a953..8270007c8 100644
--- a/docs/Gemfile.lock
+++ b/docs/Gemfile.lock
@@ -253,7 +253,7 @@ GEM
unf_ext
unf_ext (0.0.8.2)
unicode-display_width (1.8.0)
- webrick (1.8.1)
+ webrick (1.8.2)
PLATFORMS
arm64-darwin-21
diff --git a/lib/tasks/data_export.rake b/lib/tasks/data_export.rake
index 719462cb4..7a9a90bc8 100644
--- a/lib/tasks/data_export.rake
+++ b/lib/tasks/data_export.rake
@@ -7,11 +7,13 @@ namespace :core do
end
desc "Export all data XMLs for import into Central Data System (CDS)"
- task :full_data_export_xml, %i[year] => :environment do |_task, args|
- collection_year = args[:year].present? ? args[:year].to_i : nil
+ task :full_data_export_xml, %i[collection] => :environment do |_task, args|
+ collection = args[:collection].presence
+ collection = collection.to_i if collection.present? && collection.scan(/\D/).empty?
+
storage_service = Storage::S3Service.new(Configuration::EnvConfigurationService.new, ENV["EXPORT_BUCKET"])
- export_service = Exports::LettingsLogExportService.new(storage_service)
+ export_service = Exports::ExportService.new(storage_service)
- export_service.export_xml_lettings_logs(full_update: true, collection_year:)
+ export_service.export_xml(full_update: true, collection:)
end
end
diff --git a/spec/fixtures/exports/organisation.xml b/spec/fixtures/exports/organisation.xml
new file mode 100644
index 000000000..8d87da16c
--- /dev/null
+++ b/spec/fixtures/exports/organisation.xml
@@ -0,0 +1,26 @@
+
+
+
+
diff --git a/spec/fixtures/exports/user.xml b/spec/fixtures/exports/user.xml
new file mode 100644
index 000000000..5652ac9c6
--- /dev/null
+++ b/spec/fixtures/exports/user.xml
@@ -0,0 +1,17 @@
+
+
+
+
diff --git a/spec/helpers/filters_helper_spec.rb b/spec/helpers/filters_helper_spec.rb
index a2b658cdf..58c82f0c9 100644
--- a/spec/helpers/filters_helper_spec.rb
+++ b/spec/helpers/filters_helper_spec.rb
@@ -527,6 +527,56 @@ RSpec.describe FiltersHelper do
end
end
+ describe "#collection_year_radio_options" do
+ context "with 23/24 as the current collection year" do
+ before do
+ allow(Time).to receive(:now).and_return(Time.zone.local(2023, 5, 1))
+ end
+
+ context "and in crossover period" do
+ before do
+ allow(FormHandler.instance).to receive(:in_crossover_period?).and_return(true)
+ end
+
+ it "has the correct options" do
+ expect(collection_year_radio_options).to eq(
+ {
+ "2023" => { label: "2023/24" }, "2022" => { label: "2022/23" }, "2021" => { label: "2021/22" }
+ },
+ )
+ end
+ end
+
+ context "and not in crossover period" do
+ before do
+ allow(FormHandler.instance).to receive(:in_crossover_period?).and_return(false)
+ end
+
+ it "has the correct options" do
+ expect(collection_year_radio_options).to eq(
+ {
+ "2023" => { label: "2023/24" }, "2022" => { label: "2022/23" }
+ },
+ )
+ end
+ end
+ end
+
+ context "with 24/25 as the current collection year" do
+ before do
+ allow(Time).to receive(:now).and_return(Time.zone.local(2024, 5, 1))
+ end
+
+ it "has the correct options" do
+ expect(collection_year_radio_options).to eq(
+ {
+ "2024" => { label: "2024/25" }, "2023" => { label: "2023/24" }, "2022" => { label: "2022/23" }
+ },
+ )
+ end
+ end
+ end
+
describe "#filters_applied_text" do
let(:filter_type) { "lettings_logs" }
let(:result) { filters_applied_text(filter_type) }
diff --git a/spec/jobs/data_export_xml_job_spec.rb b/spec/jobs/data_export_xml_job_spec.rb
index c029dad71..3712e115c 100644
--- a/spec/jobs/data_export_xml_job_spec.rb
+++ b/spec/jobs/data_export_xml_job_spec.rb
@@ -1,25 +1,33 @@
require "rails_helper"
describe DataExportXmlJob do
- let(:storage_service) { instance_double(Storage::S3Service) }
+ let(:storage_service) { instance_double(Storage::S3Service, write_file: nil) }
let(:env_config_service) { instance_double(Configuration::EnvConfigurationService) }
- let(:export_service) { instance_double(Exports::LettingsLogExportService) }
+ let(:lettings_export_service) { instance_double(Exports::LettingsLogExportService, export_xml_lettings_logs: {}) }
+ let(:user_export_service) { instance_double(Exports::UserExportService, export_xml_users: {}) }
+ let(:organisation_export_service) { instance_double(Exports::OrganisationExportService, export_xml_organisations: {}) }
before do
allow(Storage::S3Service).to receive(:new).and_return(storage_service)
allow(Configuration::EnvConfigurationService).to receive(:new).and_return(env_config_service)
- allow(Exports::LettingsLogExportService).to receive(:new).and_return(export_service)
+ allow(Exports::LettingsLogExportService).to receive(:new).and_return(lettings_export_service)
+ allow(Exports::UserExportService).to receive(:new).and_return(user_export_service)
+ allow(Exports::OrganisationExportService).to receive(:new).and_return(organisation_export_service)
end
- it "calls the export service" do
- expect(export_service).to receive(:export_xml_lettings_logs)
+ it "calls the export services" do
+ expect(lettings_export_service).to receive(:export_xml_lettings_logs)
+ expect(user_export_service).to receive(:export_xml_users)
+ expect(organisation_export_service).to receive(:export_xml_organisations)
described_class.perform_now
end
context "with full update enabled" do
it "calls the export service" do
- expect(export_service).to receive(:export_xml_lettings_logs).with(full_update: true)
+ expect(lettings_export_service).to receive(:export_xml_lettings_logs).with(full_update: true, collection_year: nil)
+ expect(user_export_service).to receive(:export_xml_users).with(full_update: true)
+ expect(organisation_export_service).to receive(:export_xml_organisations).with(full_update: true)
described_class.perform_now(full_update: true)
end
diff --git a/spec/lib/tasks/data_export_spec.rb b/spec/lib/tasks/data_export_spec.rb
index afb99f872..8b5dd5fbe 100644
--- a/spec/lib/tasks/data_export_spec.rb
+++ b/spec/lib/tasks/data_export_spec.rb
@@ -4,7 +4,7 @@ require "rake"
describe "rake core:data_export", type: task do
let(:export_bucket) { "export_bucket" }
let(:storage_service) { instance_double(Storage::S3Service) }
- let(:export_service) { instance_double(Exports::LettingsLogExportService) }
+ let(:export_service) { instance_double(Exports::ExportService) }
before do
Rake.application.rake_require("tasks/data_export")
@@ -12,7 +12,7 @@ describe "rake core:data_export", type: task do
task.reenable
allow(Storage::S3Service).to receive(:new).and_return(storage_service)
- allow(Exports::LettingsLogExportService).to receive(:new).and_return(export_service)
+ allow(Exports::ExportService).to receive(:new).and_return(export_service)
allow(ENV).to receive(:[])
allow(ENV).to receive(:[]).with("EXPORT_BUCKET").and_return(export_bucket)
end
@@ -30,15 +30,15 @@ describe "rake core:data_export", type: task do
context "with all available years" do
it "calls the export service" do
- expect(export_service).to receive(:export_xml_lettings_logs).with(full_update: true, collection_year: nil)
+ expect(export_service).to receive(:export_xml).with(full_update: true, collection: nil)
task.invoke
end
end
- context "with a specific year" do
+ context "with a specific collection" do
it "calls the export service" do
- expect(export_service).to receive(:export_xml_lettings_logs).with(full_update: true, collection_year: 2022)
+ expect(export_service).to receive(:export_xml).with(full_update: true, collection: 2022)
task.invoke("2022")
end
diff --git a/spec/models/lettings_log_spec.rb b/spec/models/lettings_log_spec.rb
index 92452f197..e28f0f2c5 100644
--- a/spec/models/lettings_log_spec.rb
+++ b/spec/models/lettings_log_spec.rb
@@ -1345,6 +1345,13 @@ RSpec.describe LettingsLog do
expect(result.third.id).to eq lettings_log_with_postcode.id
end
end
+
+ it "sanitises input for order" do
+ lettings_log_to_search.update!(tenancycode: "' 1234")
+ result = described_class.search_by(lettings_log_to_search.tenancycode)
+ expect(result.count).to eq(1)
+ expect(result.first.id).to eq lettings_log_to_search.id
+ end
end
end
diff --git a/spec/models/sales_log_spec.rb b/spec/models/sales_log_spec.rb
index a568ca330..ae9b00d4c 100644
--- a/spec/models/sales_log_spec.rb
+++ b/spec/models/sales_log_spec.rb
@@ -214,6 +214,13 @@ RSpec.describe SalesLog, type: :model do
expect(result).to include(have_attributes(id: sales_log_to_search.id))
end
end
+
+ it "sanitises input for order" do
+ sales_log_to_search.update!(purchid: "' 123456")
+ result = described_class.search_by(sales_log_to_search.purchid)
+ expect(result.count).to be >= 1
+ expect(result).to include(have_attributes(id: sales_log_to_search.id))
+ end
end
context "when filtering by year or nil" do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 5cb6cb580..beb3d589e 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -243,6 +243,128 @@ RSpec.describe User, type: :model do
expect(user.need_two_factor_authentication?(nil)).to be false
end
end
+
+ context "when the user is in non staging environment" do
+ before do
+ allow(Rails.env).to receive(:staging?).and_return(false)
+ end
+
+ context "and the user is in the staging role update email allowlist" do
+ before do
+ allow(Rails.application.credentials).to receive(:[]).with(:staging_role_update_email_allowlist).and_return(["example.com"])
+ end
+
+ context "when the user is a data provider" do
+ it "cannot assign roles" do
+ expect(user.assignable_roles).to eq({})
+ end
+ end
+
+ context "when the user is a data coordinator" do
+ let(:user) { create(:user, :data_coordinator) }
+
+ it "can assign all roles except support" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ })
+ end
+ end
+
+ context "when the user is a Support user" do
+ let(:user) { create(:user, :support) }
+
+ it "can assign all roles" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ support: 99,
+ })
+ end
+ end
+ end
+ end
+
+ context "when the user is in staging environment" do
+ before do
+ allow(Rails.env).to receive(:staging?).and_return(true)
+ end
+
+ context "and the user is not in the staging role update email allowlist" do
+ context "when the user is a data provider" do
+ let(:user) { create(:user, :data_provider) }
+
+ it "cannot assign roles" do
+ expect(user.assignable_roles).to eq({})
+ end
+ end
+
+ context "when the user is a data coordinator" do
+ let(:user) { create(:user, :data_coordinator) }
+
+ it "can assign all roles except support" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ })
+ end
+ end
+
+ context "when the user is a Support user" do
+ let(:user) { create(:user, :support) }
+
+ it "can assign all roles" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ support: 99,
+ })
+ end
+ end
+ end
+
+ context "and the user is in the staging role update email allowlist" do
+ before do
+ allow(Rails.application.credentials).to receive(:[]).with(:staging_role_update_email_allowlist).and_return(["example.com"])
+ end
+
+ context "when the user is a data provider" do
+ let(:user) { create(:user, :data_provider) }
+
+ it "can assign all roles" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ support: 99,
+ })
+ end
+ end
+
+ context "when the user is a data coordinator" do
+ let(:user) { create(:user, :data_coordinator) }
+
+ it "can assign all roles" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ support: 99,
+ })
+ end
+ end
+
+ context "when the user is a Support user" do
+ let(:user) { create(:user, :support) }
+
+ it "can assign all roles" do
+ expect(user.assignable_roles).to eq({
+ data_provider: 1,
+ data_coordinator: 2,
+ support: 99,
+ })
+ end
+ end
+ end
+ end
end
describe "paper trail" do
diff --git a/spec/services/exports/export_service_spec.rb b/spec/services/exports/export_service_spec.rb
new file mode 100644
index 000000000..fb52c5274
--- /dev/null
+++ b/spec/services/exports/export_service_spec.rb
@@ -0,0 +1,333 @@
+require "rails_helper"
+
+RSpec.describe Exports::ExportService do
+ subject(:export_service) { described_class.new(storage_service) }
+
+ let(:storage_service) { instance_double(Storage::S3Service) }
+ let(:expected_master_manifest_filename) { "Manifest_2022_05_01_0001.csv" }
+ let(:start_time) { Time.zone.local(2022, 5, 1) }
+ let(:user) { FactoryBot.create(:user, email: "test1@example.com") }
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: {}) }
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: {}) }
+
+ before do
+ Timecop.freeze(start_time)
+ Singleton.__init__(FormHandler)
+ allow(storage_service).to receive(:write_file)
+ allow(Exports::LettingsLogExportService).to receive(:new).and_return(lettings_logs_export_service)
+ allow(Exports::UserExportService).to receive(:new).and_return(users_export_service)
+ allow(Exports::OrganisationExportService).to receive(:new).and_return(organisations_export_service)
+ end
+
+ after do
+ Timecop.return
+ end
+
+ context "when exporting daily XMLs" do
+ context "and no lettings archives get created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: {}) }
+
+ context "and no user or organisation archives get created in user export" do
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers but no data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and one user archive gets created in user export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and one organisation archive gets created in organisation export" do
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: { "some_organisation_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_organisation_file_base_name,2022-05-01 00:00:00 +0100,some_organisation_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and user and organisation archives get created in user and organisation exports" do
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: { "some_organisation_file_base_name" => start_time }) }
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\nsome_organisation_file_base_name,2022-05-01 00:00:00 +0100,some_organisation_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+
+ context "and one lettings archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ context "and no user archives get created in user export" do
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and one user archive gets created in user export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+
+ context "and multiple lettings archives get created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time, "second_file_base_name" => start_time }) }
+
+ context "and no user archives get created in user export" do
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\nsecond_file_base_name,2022-05-01 00:00:00 +0100,second_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and multiple user archives get created in user export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time, "second_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\nsecond_file_base_name,2022-05-01 00:00:00 +0100,second_file_base_name.zip\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\nsecond_user_file_base_name,2022-05-01 00:00:00 +0100,second_user_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+
+ context "and multiple user and organisation archives get created in user and organisation exports" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time, "second_user_file_base_name" => start_time }) }
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: { "some_organisation_file_base_name" => start_time, "second_organisation_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml
+ end
+
+ it "generates a master manifest with CSV headers and correct data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\nsecond_file_base_name,2022-05-01 00:00:00 +0100,second_file_base_name.zip\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\nsecond_user_file_base_name,2022-05-01 00:00:00 +0100,second_user_file_base_name.zip\nsome_organisation_file_base_name,2022-05-01 00:00:00 +0100,some_organisation_file_base_name.zip\nsecond_organisation_file_base_name,2022-05-01 00:00:00 +0100,second_organisation_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+ end
+
+ context "when exporting specific lettings log collection" do
+ context "and no lettings archives get created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: {}) }
+
+ context "and user archive gets created in user export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "2022")
+ end
+
+ it "does not write user data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "2022")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+
+ context "and lettings archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ context "and user archive gets created in user export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "2023")
+ end
+
+ it "does not write user data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_file_base_name,2022-05-01 00:00:00 +0100,some_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "2023")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+ end
+
+ context "when exporting user collection" do
+ context "and no user archives get created in users export" do
+ context "and lettings log archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "users")
+ end
+
+ it "does not write lettings log data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "users")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+
+ context "and lettings log archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ context "and users archive gets created in users export" do
+ let(:users_export_service) { instance_double("Exports::UserExportService", export_xml_users: { "some_user_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "users")
+ end
+
+ it "does not write lettings log data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_user_file_base_name,2022-05-01 00:00:00 +0100,some_user_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "users")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+ end
+
+ context "when exporting organisation collection" do
+ context "and no organisation archives get created in organisations export" do
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: {}) }
+
+ context "and lettings log archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "organisations")
+ end
+
+ it "does not write lettings log data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "organisations")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+
+ context "and lettings log archive gets created in lettings logs export" do
+ let(:lettings_logs_export_service) { instance_double("Exports::LettingsLogExportService", export_xml_lettings_logs: { "some_file_base_name" => start_time }) }
+
+ context "and organisations archive gets created in organisations export" do
+ let(:organisations_export_service) { instance_double("Exports::OrganisationExportService", export_xml_organisations: { "some_organisation_file_base_name" => start_time }) }
+
+ it "generates a master manifest with the correct name" do
+ expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
+ export_service.export_xml(full_update: true, collection: "organisations")
+ end
+
+ it "does not write lettings log data" do
+ actual_content = nil
+ expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\nsome_organisation_file_base_name,2022-05-01 00:00:00 +0100,some_organisation_file_base_name.zip\n"
+ allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
+
+ export_service.export_xml(full_update: true, collection: "organisations")
+ expect(actual_content).to eq(expected_content)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/exports/lettings_log_export_service_spec.rb b/spec/services/exports/lettings_log_export_service_spec.rb
index b3f33f24f..6f7d88c91 100644
--- a/spec/services/exports/lettings_log_export_service_spec.rb
+++ b/spec/services/exports/lettings_log_export_service_spec.rb
@@ -1,7 +1,7 @@
require "rails_helper"
RSpec.describe Exports::LettingsLogExportService do
- subject(:export_service) { described_class.new(storage_service) }
+ subject(:export_service) { described_class.new(storage_service, start_time) }
let(:storage_service) { instance_double(Storage::S3Service) }
@@ -11,8 +11,6 @@ RSpec.describe Exports::LettingsLogExportService do
let(:real_2021_2022_form) { Form.new("config/forms/2021_2022.json") }
let(:real_2022_2023_form) { Form.new("config/forms/2022_2023.json") }
- let(:expected_master_manifest_filename) { "Manifest_2022_05_01_0001.csv" }
- let(:expected_master_manifest_rerun) { "Manifest_2022_05_01_0002.csv" }
let(:expected_zip_filename) { "core_2021_2022_apr_mar_f0001_inc0001.zip" }
let(:expected_data_filename) { "core_2021_2022_apr_mar_f0001_inc0001_pt001.xml" }
let(:expected_manifest_filename) { "manifest.xml" }
@@ -49,18 +47,9 @@ RSpec.describe Exports::LettingsLogExportService do
context "when exporting daily lettings logs in XML" do
context "and no lettings logs is available for export" do
- it "generates a master manifest with the correct name" do
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args)
- export_service.export_xml_lettings_logs
- end
-
- it "generates a master manifest with CSV headers but no data" do
- actual_content = nil
- expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
- allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
-
- export_service.export_xml_lettings_logs
- expect(actual_content).to eq(expected_content)
+ it "returns an empty archives list" do
+ expect(storage_service).not_to receive(:write_file)
+ expect(export_service.export_xml_lettings_logs).to eq({})
end
end
@@ -83,13 +72,9 @@ RSpec.describe Exports::LettingsLogExportService do
)
end
- it "generates a master manifest with CSV headers but no data" do
- actual_content = nil
- expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\n"
- allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
-
- export_service.export_xml_lettings_logs
- expect(actual_content).to eq(expected_content)
+ it "returns empty archives list for archives manifest" do
+ expect(storage_service).not_to receive(:write_file)
+ expect(export_service.export_xml_lettings_logs).to eq({})
end
end
@@ -101,15 +86,6 @@ RSpec.describe Exports::LettingsLogExportService do
export_service.export_xml_lettings_logs
end
- it "generates an XML manifest file with the expected filename within the ZIP file" do
- expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
- entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
- expect(entry).not_to be_nil
- expect(entry.name).to eq(expected_manifest_filename)
- end
- export_service.export_xml_lettings_logs
- end
-
it "generates an XML export file with the expected filename within the ZIP file" do
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
@@ -141,13 +117,8 @@ RSpec.describe Exports::LettingsLogExportService do
export_service.export_xml_lettings_logs
end
- it "generates a master manifest with CSV headers" do
- actual_content = nil
- expected_content = "zip-name,date-time zipped folder generated,zip-file-uri\ncore_2021_2022_apr_mar_f0001_inc0001,2022-05-01 00:00:00 +0100,#{expected_zip_filename}\n"
- allow(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) { |_, arg2| actual_content = arg2&.string }
-
- export_service.export_xml_lettings_logs
- expect(actual_content).to eq(expected_content)
+ it "returns the list with correct archive" do
+ expect(export_service.export_xml_lettings_logs).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
end
end
@@ -178,8 +149,10 @@ RSpec.describe Exports::LettingsLogExportService do
end
context "with 23/24 collection period" do
+ let(:start_time) { Time.zone.local(2023, 4, 3) }
+
before do
- Timecop.freeze(Time.zone.local(2023, 4, 3))
+ Timecop.freeze(start_time)
Singleton.__init__(FormHandler)
stub_request(:get, "https://api.os.uk/search/places/v1/uprn?dataset=DPA,LPI&key=OS_DATA_KEY&uprn=100023336956")
.to_return(status: 200, body: '{"status":200,"results":[{"DPA":{
@@ -234,15 +207,15 @@ RSpec.describe Exports::LettingsLogExportService do
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
expect(storage_service).to receive(:write_file).with(expected_zip_filename2, any_args)
expect(Rails.logger).to receive(:info).with("Building export run for 2021")
- expect(Rails.logger).to receive(:info).with("Creating core_2021_2022_apr_mar_f0001_inc0001 - 1 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2021_2022_apr_mar_f0001_inc0001 - 1 resources")
expect(Rails.logger).to receive(:info).with("Added core_2021_2022_apr_mar_f0001_inc0001_pt001.xml")
expect(Rails.logger).to receive(:info).with("Writing core_2021_2022_apr_mar_f0001_inc0001.zip")
expect(Rails.logger).to receive(:info).with("Building export run for 2022")
- expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 resources")
expect(Rails.logger).to receive(:info).with("Added core_2022_2023_apr_mar_f0001_inc0001_pt001.xml")
expect(Rails.logger).to receive(:info).with("Writing core_2022_2023_apr_mar_f0001_inc0001.zip")
expect(Rails.logger).to receive(:info).with("Building export run for 2023")
- expect(Rails.logger).to receive(:info).with("Creating core_2023_2024_apr_mar_f0001_inc0001 - 0 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2023_2024_apr_mar_f0001_inc0001 - 0 resources")
export_service.export_xml_lettings_logs
end
@@ -250,7 +223,7 @@ RSpec.describe Exports::LettingsLogExportService do
it "generates zip export files only for specified year" do
expect(storage_service).to receive(:write_file).with(expected_zip_filename2, any_args)
expect(Rails.logger).to receive(:info).with("Building export run for 2022")
- expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 resources")
expect(Rails.logger).to receive(:info).with("Added core_2022_2023_apr_mar_f0001_inc0001_pt001.xml")
expect(Rails.logger).to receive(:info).with("Writing core_2022_2023_apr_mar_f0001_inc0001.zip")
@@ -262,22 +235,22 @@ RSpec.describe Exports::LettingsLogExportService do
let(:expected_zip_filename2) { "core_2022_2023_apr_mar_f0001_inc0001.zip" }
before do
- LogsExport.new(started_at: Time.zone.yesterday, base_number: 7, increment_number: 3, collection: 2021).save!
+ Export.new(started_at: Time.zone.yesterday, base_number: 7, increment_number: 3, collection: 2021).save!
end
it "generates multiple ZIP export files with different base numbers in the filenames" do
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
expect(storage_service).to receive(:write_file).with(expected_zip_filename2, any_args)
expect(Rails.logger).to receive(:info).with("Building export run for 2021")
- expect(Rails.logger).to receive(:info).with("Creating core_2021_2022_apr_mar_f0007_inc0004 - 1 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2021_2022_apr_mar_f0007_inc0004 - 1 resources")
expect(Rails.logger).to receive(:info).with("Added core_2021_2022_apr_mar_f0007_inc0004_pt001.xml")
expect(Rails.logger).to receive(:info).with("Writing core_2021_2022_apr_mar_f0007_inc0004.zip")
expect(Rails.logger).to receive(:info).with("Building export run for 2022")
- expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2022_2023_apr_mar_f0001_inc0001 - 1 resources")
expect(Rails.logger).to receive(:info).with("Added core_2022_2023_apr_mar_f0001_inc0001_pt001.xml")
expect(Rails.logger).to receive(:info).with("Writing core_2022_2023_apr_mar_f0001_inc0001.zip")
expect(Rails.logger).to receive(:info).with("Building export run for 2023")
- expect(Rails.logger).to receive(:info).with("Creating core_2023_2024_apr_mar_f0001_inc0001 - 0 logs")
+ expect(Rails.logger).to receive(:info).with("Creating core_2023_2024_apr_mar_f0001_inc0001 - 0 resources")
export_service.export_xml_lettings_logs
end
@@ -304,18 +277,13 @@ RSpec.describe Exports::LettingsLogExportService do
it "creates a logs export record in a database with correct time" do
expect { export_service.export_xml_lettings_logs }
- .to change(LogsExport, :count).by(3)
- expect(LogsExport.last.started_at).to be_within(2.seconds).of(start_time)
+ .to change(Export, :count).by(3)
+ expect(Export.last.started_at).to be_within(2.seconds).of(start_time)
end
context "when this is the first export (full)" do
- it "records a ZIP archive in the master manifest (existing lettings logs)" do
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_filename, any_args) do |_, csv_content|
- csv = CSV.parse(csv_content, headers: true)
- expect(csv&.count).to be > 0
- end
-
- export_service.export_xml_lettings_logs
+ it "returns a ZIP archive for the master manifest (existing lettings logs)" do
+ expect(export_service.export_xml_lettings_logs).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
end
end
@@ -360,15 +328,12 @@ RSpec.describe Exports::LettingsLogExportService do
context "when this is a second export (partial)" do
before do
start_time = Time.zone.local(2022, 6, 1)
- LogsExport.new(started_at: start_time).save!
+ Export.new(started_at: start_time, collection: 2021).save!
end
- it "does not add any entry in the master manifest (no lettings logs)" do
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_rerun, any_args) do |_, csv_content|
- csv = CSV.parse(csv_content, headers: true)
- expect(csv&.count).to eq(0)
- end
- export_service.export_xml_lettings_logs
+ it "does not add any entry for the master manifest (no lettings logs)" do
+ expect(storage_service).not_to receive(:write_file)
+ expect(export_service.export_xml_lettings_logs).to eq({})
end
end
end
@@ -379,28 +344,19 @@ RSpec.describe Exports::LettingsLogExportService do
export_service.export_xml_lettings_logs
end
- it "increments the master manifest number by 1" do
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_rerun, any_args)
- export_service.export_xml_lettings_logs
- end
-
context "and we trigger another full update" do
it "increments the base number" do
export_service.export_xml_lettings_logs(full_update: true)
- expect(LogsExport.last.base_number).to eq(2)
+ expect(Export.last.base_number).to eq(2)
end
it "resets the increment number" do
export_service.export_xml_lettings_logs(full_update: true)
- expect(LogsExport.last.increment_number).to eq(1)
+ expect(Export.last.increment_number).to eq(1)
end
- it "records a ZIP archive in the master manifest (existing lettings logs)" do
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_rerun, any_args) do |_, csv_content|
- csv = CSV.parse(csv_content, headers: true)
- expect(csv&.count).to be > 0
- end
- export_service.export_xml_lettings_logs(full_update: true)
+ it "returns a correct archives list for manifest file" do
+ expect(export_service.export_xml_lettings_logs(full_update: true)).to eq({ "core_2021_2022_apr_mar_f0002_inc0001" => start_time })
end
it "generates a ZIP export file with the expected filename" do
@@ -416,7 +372,7 @@ RSpec.describe Exports::LettingsLogExportService do
it "doesn't increment the manifest number by 1" do
export_service.export_xml_lettings_logs
- expect(LogsExport.last.increment_number).to eq(1)
+ expect(Export.last.increment_number).to eq(1)
end
end
@@ -424,19 +380,18 @@ RSpec.describe Exports::LettingsLogExportService do
before do
FactoryBot.create(:lettings_log, startdate: Time.zone.local(2022, 2, 1), updated_at: Time.zone.local(2022, 4, 27), values_updated_at: Time.zone.local(2022, 4, 29))
FactoryBot.create(:lettings_log, startdate: Time.zone.local(2022, 2, 1), updated_at: Time.zone.local(2022, 4, 27), values_updated_at: Time.zone.local(2022, 4, 29))
- LogsExport.create!(started_at: Time.zone.local(2022, 4, 28), base_number: 1, increment_number: 1)
+ Export.create!(started_at: Time.zone.local(2022, 4, 28), base_number: 1, increment_number: 1)
end
it "generates an XML manifest file with the expected content within the ZIP file" do
expected_content = replace_record_number(local_manifest_file.read, 2)
- expect(storage_service).to receive(:write_file).with(expected_master_manifest_rerun, any_args)
expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
expect(entry).not_to be_nil
expect(entry.get_input_stream.read).to eq(expected_content)
end
- export_service.export_xml_lettings_logs
+ expect(export_service.export_xml_lettings_logs).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
end
end
@@ -461,8 +416,10 @@ RSpec.describe Exports::LettingsLogExportService do
end
context "with 24/25 collection period" do
+ let(:start_time) { Time.zone.local(2024, 4, 3) }
+
before do
- Timecop.freeze(Time.zone.local(2024, 4, 3))
+ Timecop.freeze(start_time)
Singleton.__init__(FormHandler)
end
diff --git a/spec/services/exports/organisation_export_service_spec.rb b/spec/services/exports/organisation_export_service_spec.rb
new file mode 100644
index 000000000..4de0e84a8
--- /dev/null
+++ b/spec/services/exports/organisation_export_service_spec.rb
@@ -0,0 +1,219 @@
+require "rails_helper"
+
+RSpec.describe Exports::OrganisationExportService do
+ subject(:export_service) { described_class.new(storage_service, start_time) }
+
+ let(:storage_service) { instance_double(Storage::S3Service) }
+
+ let(:xml_export_file) { File.open("spec/fixtures/exports/organisation.xml", "r:UTF-8") }
+ let(:local_manifest_file) { File.open("spec/fixtures/exports/manifest.xml", "r:UTF-8") }
+
+ let(:expected_zip_filename) { "organisations_2024_2025_apr_mar_f0001_inc0001.zip" }
+ let(:expected_data_filename) { "organisations_2024_2025_apr_mar_f0001_inc0001_pt001.xml" }
+ let(:expected_manifest_filename) { "manifest.xml" }
+ let(:start_time) { Time.zone.local(2022, 5, 1) }
+ let(:organisation) { create(:organisation, with_dsa: false) }
+
+ def replace_entity_ids(organisation, export_template)
+ export_template.sub!(/\{id\}/, organisation["id"].to_s)
+ export_template.sub!(/\{dsa_signed_at\}/, organisation.data_protection_confirmation&.signed_at.to_s)
+ export_template.sub!(/\{dpo_email\}/, organisation.data_protection_confirmation&.data_protection_officer_email)
+ end
+
+ def replace_record_number(export_template, record_number)
+ export_template.sub!(/\{recno\}/, record_number.to_s)
+ end
+
+ before do
+ Timecop.freeze(start_time)
+ Singleton.__init__(FormHandler)
+ allow(storage_service).to receive(:write_file)
+ end
+
+ after do
+ Timecop.return
+ end
+
+ context "when exporting daily organisations in XML" do
+ context "and no organisations are available for export" do
+ it "returns an empty archives list" do
+ expect(export_service.export_xml_organisations).to eq({})
+ end
+ end
+
+ context "and one organisation is available for export" do
+ let!(:organisation) { create(:organisation) }
+
+ it "generates a ZIP export file with the expected filename" do
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
+ export_service.export_xml_organisations
+ end
+
+ it "generates an XML export file with the expected filename within the ZIP file" do
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
+ expect(entry).not_to be_nil
+ expect(entry.name).to eq(expected_data_filename)
+ end
+ export_service.export_xml_organisations
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 1)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_organisations
+ end
+
+ it "generates an XML export file with the expected content within the ZIP file" do
+ expected_content = replace_entity_ids(organisation, xml_export_file.read)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_organisations
+ end
+
+ it "returns the list with correct archive" do
+ expect(export_service.export_xml_organisations).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+
+ context "and multiple organisations are available for export" do
+ before do
+ create(:organisation)
+ create(:organisation)
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 2)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_organisations
+ end
+
+ it "creates an export record in a database with correct time" do
+ expect { export_service.export_xml_organisations }
+ .to change(Export, :count).by(1)
+ expect(Export.last.started_at).to be_within(2.seconds).of(start_time)
+ end
+
+ context "when this is the first export (full)" do
+ it "returns a ZIP archive for the master manifest" do
+ expect(export_service.export_xml_organisations).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+
+ context "and underlying data changes between getting the organisations and writing the manifest" do
+ def remove_organisations(organisations)
+ organisations.each(&:destroy)
+ file = Tempfile.new
+ doc = Nokogiri::XML("")
+ doc.write_xml_to(file, encoding: "UTF-8")
+ file.rewind
+ file
+ end
+
+ def create_fake_manifest
+ file = Tempfile.new
+ doc = Nokogiri::XML("")
+ doc.write_xml_to(file, encoding: "UTF-8")
+ file.rewind
+ file
+ end
+
+ it "maintains the same record number" do
+ # rubocop:disable RSpec/SubjectStub
+ allow(export_service).to receive(:build_export_xml) do |organisations|
+ remove_organisations(organisations)
+ end
+ allow(export_service).to receive(:build_manifest_xml) do
+ create_fake_manifest
+ end
+
+ expect(export_service).to receive(:build_manifest_xml).with(2)
+ # rubocop:enable RSpec/SubjectStub
+ export_service.export_xml_organisations
+ end
+ end
+
+ context "when this is a second export (partial)" do
+ before do
+ start_time = Time.zone.local(2022, 6, 1)
+ Export.new(started_at: start_time, collection: "organisations").save! # this should be organisation export
+ end
+
+ it "does not add any entry for the master manifest (no organisations)" do
+ expect(export_service.export_xml_organisations).to eq({})
+ end
+ end
+ end
+
+ context "and a previous export has run the same day having organisations" do
+ before do
+ create(:organisation)
+ export_service.export_xml_organisations
+ end
+
+ context "and we trigger another full update" do
+ it "increments the base number" do
+ export_service.export_xml_organisations(full_update: true)
+ expect(Export.last.base_number).to eq(2)
+ end
+
+ it "resets the increment number" do
+ export_service.export_xml_organisations(full_update: true)
+ expect(Export.last.increment_number).to eq(1)
+ end
+
+ it "returns a correct archives list for manifest file" do
+ expect(export_service.export_xml_organisations(full_update: true)).to eq({ "organisations_2024_2025_apr_mar_f0002_inc0001" => start_time })
+ end
+
+ it "generates a ZIP export file with the expected filename" do
+ expect(storage_service).to receive(:write_file).with("organisations_2024_2025_apr_mar_f0002_inc0001.zip", any_args)
+ export_service.export_xml_organisations(full_update: true)
+ end
+ end
+ end
+
+ context "and a previous export has run having no organisations" do
+ before { export_service.export_xml_organisations }
+
+ it "doesn't increment the manifest number by 1" do
+ export_service.export_xml_organisations
+
+ expect(Export.last.increment_number).to eq(1)
+ end
+ end
+
+ context "and an organisation has been migrated since the previous partial export" do
+ before do
+ create(:organisation, updated_at: Time.zone.local(2022, 4, 27))
+ create(:organisation, updated_at: Time.zone.local(2022, 4, 27))
+ Export.create!(started_at: Time.zone.local(2022, 4, 26), base_number: 1, increment_number: 1)
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 2)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ expect(export_service.export_xml_organisations).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+ end
+end
diff --git a/spec/services/exports/user_export_service_spec.rb b/spec/services/exports/user_export_service_spec.rb
new file mode 100644
index 000000000..713d6f907
--- /dev/null
+++ b/spec/services/exports/user_export_service_spec.rb
@@ -0,0 +1,219 @@
+require "rails_helper"
+
+RSpec.describe Exports::UserExportService do
+ subject(:export_service) { described_class.new(storage_service, start_time) }
+
+ let(:storage_service) { instance_double(Storage::S3Service) }
+
+ let(:xml_export_file) { File.open("spec/fixtures/exports/user.xml", "r:UTF-8") }
+ let(:local_manifest_file) { File.open("spec/fixtures/exports/manifest.xml", "r:UTF-8") }
+
+ let(:expected_zip_filename) { "users_2024_2025_apr_mar_f0001_inc0001.zip" }
+ let(:expected_data_filename) { "users_2024_2025_apr_mar_f0001_inc0001_pt001.xml" }
+ let(:expected_manifest_filename) { "manifest.xml" }
+ let(:start_time) { Time.zone.local(2022, 5, 1) }
+ let(:organisation) { create(:organisation, with_dsa: false) }
+
+ def replace_entity_ids(user, export_template)
+ export_template.sub!(/\{id\}/, user["id"].to_s)
+ export_template.sub!(/\{organisation_id\}/, user["organisation_id"].to_s)
+ export_template.sub!(/\{email\}/, user["email"].to_s)
+ end
+
+ def replace_record_number(export_template, record_number)
+ export_template.sub!(/\{recno\}/, record_number.to_s)
+ end
+
+ before do
+ Timecop.freeze(start_time)
+ Singleton.__init__(FormHandler)
+ allow(storage_service).to receive(:write_file)
+ end
+
+ after do
+ Timecop.return
+ end
+
+ context "when exporting daily users in XML" do
+ context "and no users are available for export" do
+ it "returns an empty archives list" do
+ expect(export_service.export_xml_users).to eq({})
+ end
+ end
+
+ context "and one user is available for export" do
+ let!(:user) { create(:user, organisation:, phone_extension: "123") }
+
+ it "generates a ZIP export file with the expected filename" do
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args)
+ export_service.export_xml_users
+ end
+
+ it "generates an XML export file with the expected filename within the ZIP file" do
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
+ expect(entry).not_to be_nil
+ expect(entry.name).to eq(expected_data_filename)
+ end
+ export_service.export_xml_users
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 1)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_users
+ end
+
+ it "generates an XML export file with the expected content within the ZIP file" do
+ expected_content = replace_entity_ids(user, xml_export_file.read)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_data_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_users
+ end
+
+ it "returns the list with correct archive" do
+ expect(export_service.export_xml_users).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+
+ context "and multiple users are available for export" do
+ before do
+ create(:user, organisation:)
+ create(:user, organisation:)
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 2)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ export_service.export_xml_users
+ end
+
+ it "creates an export record in a database with correct time" do
+ expect { export_service.export_xml_users }
+ .to change(Export, :count).by(1)
+ expect(Export.last.started_at).to be_within(2.seconds).of(start_time)
+ end
+
+ context "when this is the first export (full)" do
+ it "returns a ZIP archive for the master manifest (existing users)" do
+ expect(export_service.export_xml_users).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+
+ context "and underlying data changes between getting the users and writing the manifest" do
+ def remove_users(users)
+ users.each(&:destroy)
+ file = Tempfile.new
+ doc = Nokogiri::XML("")
+ doc.write_xml_to(file, encoding: "UTF-8")
+ file.rewind
+ file
+ end
+
+ def create_fake_manifest
+ file = Tempfile.new
+ doc = Nokogiri::XML("")
+ doc.write_xml_to(file, encoding: "UTF-8")
+ file.rewind
+ file
+ end
+
+ it "maintains the same record number" do
+ # rubocop:disable RSpec/SubjectStub
+ allow(export_service).to receive(:build_export_xml) do |users|
+ remove_users(users)
+ end
+ allow(export_service).to receive(:build_manifest_xml) do
+ create_fake_manifest
+ end
+
+ expect(export_service).to receive(:build_manifest_xml).with(2)
+ # rubocop:enable RSpec/SubjectStub
+ export_service.export_xml_users
+ end
+ end
+
+ context "when this is a second export (partial)" do
+ before do
+ start_time = Time.zone.local(2022, 6, 1)
+ Export.new(started_at: start_time, collection: "users").save! # this should be user export
+ end
+
+ it "does not add any entry for the master manifest (no users)" do
+ expect(export_service.export_xml_users).to eq({})
+ end
+ end
+ end
+
+ context "and a previous export has run the same day having users" do
+ before do
+ create(:user, organisation:)
+ export_service.export_xml_users
+ end
+
+ context "and we trigger another full update" do
+ it "increments the base number" do
+ export_service.export_xml_users(full_update: true)
+ expect(Export.last.base_number).to eq(2)
+ end
+
+ it "resets the increment number" do
+ export_service.export_xml_users(full_update: true)
+ expect(Export.last.increment_number).to eq(1)
+ end
+
+ it "returns a correct archives list for manifest file" do
+ expect(export_service.export_xml_users(full_update: true)).to eq({ "users_2024_2025_apr_mar_f0002_inc0001" => start_time })
+ end
+
+ it "generates a ZIP export file with the expected filename" do
+ expect(storage_service).to receive(:write_file).with("users_2024_2025_apr_mar_f0002_inc0001.zip", any_args)
+ export_service.export_xml_users(full_update: true)
+ end
+ end
+ end
+
+ context "and a previous export has run having no users" do
+ before { export_service.export_xml_users }
+
+ it "doesn't increment the manifest number by 1" do
+ export_service.export_xml_users
+
+ expect(Export.last.increment_number).to eq(1)
+ end
+ end
+
+ context "and a user has been migrated since the previous partial export" do
+ before do
+ create(:user, updated_at: Time.zone.local(2022, 4, 27), organisation:)
+ create(:user, updated_at: Time.zone.local(2022, 4, 27), organisation:)
+ Export.create!(started_at: Time.zone.local(2022, 4, 26), base_number: 1, increment_number: 1)
+ end
+
+ it "generates an XML manifest file with the expected content within the ZIP file" do
+ expected_content = replace_record_number(local_manifest_file.read, 2)
+ expect(storage_service).to receive(:write_file).with(expected_zip_filename, any_args) do |_, content|
+ entry = Zip::File.open_buffer(content).find_entry(expected_manifest_filename)
+ expect(entry).not_to be_nil
+ expect(entry.get_input_stream.read).to eq(expected_content)
+ end
+
+ expect(export_service.export_xml_users).to eq({ expected_zip_filename.gsub(".zip", "") => start_time })
+ end
+ end
+ end
+end