50 changed files with 881 additions and 66 deletions
@@ -1,10 +1,12 @@
|
||||
<div class="govuk-button-group app-filter-toggle"> |
||||
<% if @organisation.data_protection_confirmed? %> |
||||
<% if current_page?(controller: 'organisations', action: 'lettings_logs') %> |
||||
<%= govuk_button_to "Create a new lettings log for this organisation", lettings_logs_path(lettings_log: { owning_organisation_id: @organisation.id }, method: :post) %> |
||||
<% end %> |
||||
<%= govuk_button_to "Create a new lettings log for this organisation", lettings_logs_path(lettings_log: { owning_organisation_id: @organisation.id }, method: :post), class: "govuk-!-margin-right-6" %> |
||||
<%= govuk_button_link_to "Upload lettings logs in bulk", bulk_upload_lettings_log_path(id: "start", organisation_id: @organisation.id), secondary: true %> |
||||
<% end %> |
||||
<% if current_page?(controller: 'organisations', action: 'sales_logs') %> |
||||
<%= govuk_button_to "Create a new sales log for this organisation", sales_logs_path(sales_log: { owning_organisation_id: @organisation.id }, method: :post) %> |
||||
<% end %> |
||||
<%= govuk_button_to "Create a new sales log for this organisation", sales_logs_path(sales_log: { owning_organisation_id: @organisation.id }, method: :post), class: "govuk-!-margin-right-6" %> |
||||
<%= govuk_button_link_to "Upload sales logs in bulk", bulk_upload_sales_log_path(id: "start", organisation_id: @organisation.id), secondary: true %> |
||||
<% end %> |
||||
<% end %> |
||||
</div> |
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
class AddOrganisationIdToBulkUploads < ActiveRecord::Migration[7.0] |
||||
def change |
||||
add_column :bulk_uploads, :organisation_id, :integer |
||||
end |
||||
end |
||||
@@ -0,0 +1,5 @@
|
||||
class AddValidationCheckedField < ActiveRecord::Migration[7.0] |
||||
def change |
||||
add_column :log_validations, :checked, :boolean |
||||
end |
||||
end |
||||
@@ -0,0 +1,63 @@
|
||||
namespace :count_duplicates do |
||||
desc "Count the number of duplicate schemes per organisation" |
||||
task scheme_duplicates_per_org: :environment do |
||||
duplicates_csv = CSV.generate(headers: true) do |csv| |
||||
csv << ["Organisation id", "Number of duplicate sets", "Total duplicate schemes"] |
||||
|
||||
Organisation.visible.each do |organisation| |
||||
if organisation.owned_schemes.duplicate_sets.count.positive? |
||||
csv << [organisation.id, organisation.owned_schemes.duplicate_sets.count, organisation.owned_schemes.duplicate_sets.sum(&:size)] |
||||
end |
||||
end |
||||
end |
||||
|
||||
filename = "scheme-duplicates-#{Time.zone.now}.csv" |
||||
storage_service = Storage::S3Service.new(Configuration::EnvConfigurationService.new, ENV["BULK_UPLOAD_BUCKET"]) |
||||
storage_service.write_file(filename, "#{duplicates_csv}") |
||||
|
||||
url = storage_service.get_presigned_url(filename, 72.hours.to_i) |
||||
Rails.logger.info("Download URL: #{url}") |
||||
end |
||||
|
||||
desc "Count the number of duplicate locations per organisation" |
||||
task location_duplicates_per_org: :environment do |
||||
duplicates_csv = CSV.generate(headers: true) do |csv| |
||||
csv << ["Organisation id", "Duplicate sets within individual schemes", "Duplicate locations within individual schemes", "All duplicate sets", "All duplicates"] |
||||
|
||||
Organisation.visible.each do |organisation| |
||||
duplicate_sets_within_individual_schemes = [] |
||||
|
||||
organisation.owned_schemes.each do |scheme| |
||||
duplicate_sets_within_individual_schemes += scheme.locations.duplicate_sets |
||||
end |
||||
duplicate_locations_within_individual_schemes = duplicate_sets_within_individual_schemes.flatten |
||||
|
||||
duplicate_sets_within_duplicate_schemes = [] |
||||
if organisation.owned_schemes.duplicate_sets.count.positive? |
||||
organisation.owned_schemes.duplicate_sets.each do |duplicate_set| |
||||
duplicate_sets_within_duplicate_schemes += Location.where(scheme_id: duplicate_set).duplicate_sets_within_given_schemes |
||||
end |
||||
duplicate_locations_within_duplicate_schemes_ids = duplicate_sets_within_duplicate_schemes.flatten |
||||
|
||||
duplicate_sets_within_individual_schemes_without_intersecting_sets = duplicate_sets_within_individual_schemes.reject { |set| set.any? { |id| duplicate_sets_within_duplicate_schemes.any? { |duplicate_set| duplicate_set.include?(id) } } } |
||||
all_duplicate_sets_count = (duplicate_sets_within_individual_schemes_without_intersecting_sets + duplicate_sets_within_duplicate_schemes).count |
||||
all_duplicate_locations_count = (duplicate_locations_within_duplicate_schemes_ids + duplicate_locations_within_individual_schemes).uniq.count |
||||
else |
||||
all_duplicate_sets_count = duplicate_sets_within_individual_schemes.count |
||||
all_duplicate_locations_count = duplicate_locations_within_individual_schemes.count |
||||
end |
||||
|
||||
if all_duplicate_locations_count.positive? |
||||
csv << [organisation.id, duplicate_sets_within_individual_schemes.count, duplicate_locations_within_individual_schemes.count, all_duplicate_sets_count, all_duplicate_locations_count] |
||||
end |
||||
end |
||||
end |
||||
|
||||
filename = "location-duplicates-#{Time.zone.now}.csv" |
||||
storage_service = Storage::S3Service.new(Configuration::EnvConfigurationService.new, ENV["BULK_UPLOAD_BUCKET"]) |
||||
storage_service.write_file(filename, "#{duplicates_csv}") |
||||
|
||||
url = storage_service.get_presigned_url(filename, 72.hours.to_i) |
||||
Rails.logger.info("Download URL: #{url}") |
||||
end |
||||
end |
||||
@@ -0,0 +1,124 @@
|
||||
cd performance_test |
||||
|
||||
# Lettings logs page |
||||
echo "Get token" |
||||
TOKEN=$(curl -c token_cookies.txt -s https://staging.submit-social-housing-data.levellingup.gov.uk/account/sign-in | grep '<meta name="csrf-token"' | sed -n 's/.*content="\([^"]*\)".*/\1/p') |
||||
|
||||
echo "Logging in..." |
||||
echo $email |
||||
curl -L -o nul -c login_cookies.txt -b token_cookies.txt -X POST https://staging.submit-social-housing-data.levellingup.gov.uk/account/sign-in \ |
||||
-d "user[email]=$email" \ |
||||
-d "user[password]=$password" \ |
||||
-d "authenticity_token=$TOKEN" |
||||
|
||||
COOKIES=$(awk '/_data_collector_session/ { print $6, $7 }' login_cookies.txt | tr ' ' '=') |
||||
|
||||
echo "Running lettings logs page performance test..." |
||||
ab -n 50 -c 50 -l -C "$COOKIES" 'https://staging.submit-social-housing-data.levellingup.gov.uk/lettings-logs?years[]=2024&status[]=completed' > performance_lettings_test_results.txt |
||||
file="performance_lettings_test_results.txt" |
||||
|
||||
failed_requests=$(grep "Failed requests:" "$file" | awk '{print $3}') |
||||
non_2xx_responses=$(grep "Non-2xx responses:" "$file" | awk '{print $3}') |
||||
time_per_request_all=$(grep "Time per request:" "$file" | awk 'NR==2{print $4}') |
||||
requests_per_second=$(grep "Requests per second:" "$file" | awk '{print $4}') |
||||
|
||||
|
||||
if [ "$failed_requests" -gt 0 ]; then |
||||
echo "Lettings logs: Performance test failed - $failed_requests failed requests" |
||||
exit 1 |
||||
fi |
||||
|
||||
if [ "$non_2xx_responses" -ne 0 ] && [ -n "$non_2xx_responses" ]; then |
||||
echo "Lettings logs: Performance test failed: There were $non_2xx_responses non-2xx responses." |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$time_per_request_all > 250" | bc -l) )); then |
||||
echo "Lettings logs: Performance test failed - Time per request across all concurrent requests is more than 250 ms: $time_per_request_all ms" |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$requests_per_second < 5" | bc -l) )); then |
||||
echo "Lettings logs: Performance test failed - Requests per second is less than 5: $requests_per_second" |
||||
exit 1 |
||||
fi |
||||
|
||||
echo "Lettings logs page test passed: No failed requests and no non-2xx responses." |
||||
|
||||
|
||||
# Sales logs page |
||||
echo "Running sales logs page performance test..." |
||||
ab -n 50 -c 50 -l -C "$COOKIES" 'https://staging.submit-social-housing-data.levellingup.gov.uk/sales-logs?years[]=2024&status[]=completed' > performance_sales_test_results.txt |
||||
file="performance_sales_test_results.txt" |
||||
|
||||
failed_requests=$(grep "Failed requests:" "$file" | awk '{print $3}') |
||||
non_2xx_responses=$(grep "Non-2xx responses:" "$file" | awk '{print $3}') |
||||
time_per_request_all=$(grep "Time per request:" "$file" | awk 'NR==2{print $4}') |
||||
requests_per_second=$(grep "Requests per second:" "$file" | awk '{print $4}') |
||||
|
||||
|
||||
if [ "$failed_requests" -gt 0 ]; then |
||||
echo "Sales logs: Performance test failed - $failed_requests failed requests" |
||||
exit 1 |
||||
fi |
||||
|
||||
if [ "$non_2xx_responses" -ne 0 ] && [ -n "$non_2xx_responses" ]; then |
||||
echo "Sales logs: Performance test failed: There were $non_2xx_responses non-2xx responses." |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$time_per_request_all > 250" | bc -l) )); then |
||||
echo "Sales logs: Performance test failed - Time per request across all concurrent requests is more than 250 ms: $time_per_request_all ms" |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$requests_per_second < 5" | bc -l) )); then |
||||
echo "Sales logs: Performance test failed - Requests per second is less than 5: $requests_per_second" |
||||
exit 1 |
||||
fi |
||||
|
||||
echo "Sales logs page test passed: No failed requests and no non-2xx responses." |
||||
|
||||
|
||||
# Post data to a log test |
||||
page_content=$(curl -b login_cookies.txt -s 'https://staging.submit-social-housing-data.levellingup.gov.uk/lettings-logs?years[]=2024&status[]=completed') |
||||
completed_log_link=$(echo "$page_content" | sed -n 's/.*<a class="govuk-link" href="\([^"]*lettings-logs[^"]*\)".*/\1/p' | head -n 1) |
||||
echo "testing post to $completed_log_link" |
||||
|
||||
TOKEN=$(curl -L -b login_cookies.txt -c login_cookies.txt https://staging.submit-social-housing-data.levellingup.gov.uk$completed_log_link/tenant-code | grep '<meta name="csrf-token"' | sed -n 's/.*content="\([^"]*\)".*/\1/p') |
||||
|
||||
COOKIES=$(awk '/_data_collector_session/ { print $6, $7 }' login_cookies.txt | tr ' ' '=') |
||||
|
||||
echo "lettings_log[tenancycode]=performance_test_tenancy_code&lettings_log[page]=tenant_code&authenticity_token=$TOKEN" > post_data.txt |
||||
|
||||
ab -n 50 -c 50 -l -T application/x-www-form-urlencoded \ |
||||
-H "X-CSRF-Token: $TOKEN" \ |
||||
-C "$COOKIES" \ |
||||
-p post_data.txt \ |
||||
"https://staging.submit-social-housing-data.levellingup.gov.uk$completed_log_link/tenant-code" > performance_post_test_results.txt |
||||
|
||||
file="performance_post_test_results.txt" |
||||
failed_requests=$(grep "Failed requests:" "$file" | awk '{print $3}') |
||||
time_per_request_all=$(grep "Time per request:" "$file" | awk 'NR==2{print $4}') |
||||
requests_per_second=$(grep "Requests per second:" "$file" | awk '{print $4}') |
||||
|
||||
|
||||
if [ "$failed_requests" -gt 0 ]; then |
||||
echo "Update logs: Performance test failed - $failed_requests failed requests" |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$time_per_request_all > 500" | bc -l) )); then |
||||
echo "Update logs: Performance test failed - Time per request across all concurrent requests is more than 500 ms: $time_per_request_all ms" |
||||
exit 1 |
||||
fi |
||||
|
||||
if (( $(echo "$requests_per_second < 3" | bc -l) )); then |
||||
echo "Update logs: Performance test failed - Requests per second is less than 3: $requests_per_second" |
||||
exit 1 |
||||
fi |
||||
|
||||
echo "Update logs test passed: No failed requests and request times as expected." |
||||
|
||||
echo "All tests passed" |
||||
exit 0 |
||||
@@ -0,0 +1,111 @@
|
||||
require "rails_helper" |
||||
require "rake" |
||||
|
||||
RSpec.describe "count_duplicates" do |
||||
before do |
||||
allow(Storage::S3Service).to receive(:new).and_return(storage_service) |
||||
allow(storage_service).to receive(:write_file) |
||||
allow(storage_service).to receive(:get_presigned_url).and_return(test_url) |
||||
end |
||||
|
||||
describe "count_duplicates:scheme_duplicates_per_org", type: :task do |
||||
subject(:task) { Rake::Task["count_duplicates:scheme_duplicates_per_org"] } |
||||
|
||||
let(:storage_service) { instance_double(Storage::S3Service) } |
||||
let(:test_url) { "test_url" } |
||||
|
||||
before do |
||||
Rake.application.rake_require("tasks/count_duplicates") |
||||
Rake::Task.define_task(:environment) |
||||
task.reenable |
||||
end |
||||
|
||||
context "when the rake task is run" do |
||||
context "and there are no duplicate schemes" do |
||||
before do |
||||
create(:organisation) |
||||
end |
||||
|
||||
it "creates a csv with headers only" do |
||||
expect(storage_service).to receive(:write_file).with(/scheme-duplicates-.*\.csv/, "\uFEFFOrganisation id,Number of duplicate sets,Total duplicate schemes\n") |
||||
expect(Rails.logger).to receive(:info).with("Download URL: #{test_url}") |
||||
task.invoke |
||||
end |
||||
end |
||||
|
||||
context "and there are duplicate schemes" do |
||||
let(:organisation) { create(:organisation) } |
||||
let(:organisation2) { create(:organisation) } |
||||
|
||||
before do |
||||
create_list(:scheme, 2, :duplicate, owning_organisation: organisation) |
||||
create_list(:scheme, 3, :duplicate, primary_client_group: "I", owning_organisation: organisation) |
||||
create_list(:scheme, 5, :duplicate, owning_organisation: organisation2) |
||||
end |
||||
|
||||
it "creates a csv with correct duplicate numbers" do |
||||
expect(storage_service).to receive(:write_file).with(/scheme-duplicates-.*\.csv/, "\uFEFFOrganisation id,Number of duplicate sets,Total duplicate schemes\n#{organisation.id},2,5\n#{organisation2.id},1,5\n") |
||||
expect(Rails.logger).to receive(:info).with("Download URL: #{test_url}") |
||||
task.invoke |
||||
end |
||||
end |
||||
end |
||||
end |
||||
|
||||
describe "count_duplicates:location_duplicates_per_org", type: :task do |
||||
subject(:task) { Rake::Task["count_duplicates:location_duplicates_per_org"] } |
||||
|
||||
let(:storage_service) { instance_double(Storage::S3Service) } |
||||
let(:test_url) { "test_url" } |
||||
|
||||
before do |
||||
Rake.application.rake_require("tasks/count_duplicates") |
||||
Rake::Task.define_task(:environment) |
||||
task.reenable |
||||
end |
||||
|
||||
context "when the rake task is run" do |
||||
context "and there are no duplicate locations" do |
||||
before do |
||||
create(:organisation) |
||||
end |
||||
|
||||
it "creates a csv with headers only" do |
||||
expect(storage_service).to receive(:write_file).with(/location-duplicates-.*\.csv/, "\uFEFFOrganisation id,Duplicate sets within individual schemes,Duplicate locations within individual schemes,All duplicate sets,All duplicates\n") |
||||
expect(Rails.logger).to receive(:info).with("Download URL: #{test_url}") |
||||
task.invoke |
||||
end |
||||
end |
||||
|
||||
context "and there are duplicate locations" do |
||||
let(:organisation) { create(:organisation) } |
||||
let(:scheme_a) { create(:scheme, :duplicate, owning_organisation: organisation) } |
||||
let(:scheme_b) { create(:scheme, :duplicate, owning_organisation: organisation) } |
||||
let(:scheme_c) { create(:scheme, owning_organisation: organisation) } |
||||
let(:organisation2) { create(:organisation) } |
||||
let(:scheme2) { create(:scheme, owning_organisation: organisation2) } |
||||
let(:scheme3) { create(:scheme, owning_organisation: organisation2) } |
||||
|
||||
before do |
||||
create_list(:location, 2, postcode: "A1 1AB", mobility_type: "M", scheme: scheme_a) # Location A |
||||
create_list(:location, 1, postcode: "A1 1AB", mobility_type: "A", scheme: scheme_a) # Location B |
||||
|
||||
create_list(:location, 1, postcode: "A1 1AB", mobility_type: "M", scheme: scheme_b) # Location A |
||||
create_list(:location, 1, postcode: "A1 1AB", mobility_type: "A", scheme: scheme_b) # Location B |
||||
create_list(:location, 2, postcode: "A1 1AB", mobility_type: "N", scheme: scheme_b) # Location C |
||||
|
||||
create_list(:location, 2, postcode: "A1 1AB", mobility_type: "A", scheme: scheme_c) # Location B |
||||
|
||||
create_list(:location, 5, postcode: "A1 1AB", mobility_type: "M", scheme: scheme2) |
||||
create_list(:location, 2, postcode: "A1 1AB", mobility_type: "M", scheme: scheme3) |
||||
end |
||||
|
||||
it "creates a csv with correct duplicate numbers" do |
||||
expect(storage_service).to receive(:write_file).with(/location-duplicates-.*\.csv/, "\uFEFFOrganisation id,Duplicate sets within individual schemes,Duplicate locations within individual schemes,All duplicate sets,All duplicates\n#{organisation.id},3,6,4,9\n#{organisation2.id},2,7,2,7\n") |
||||
expect(Rails.logger).to receive(:info).with("Download URL: #{test_url}") |
||||
task.invoke |
||||
end |
||||
end |
||||
end |
||||
end |
||||
end |
||||
Loading…
Reference in new issue