diff --git a/app/controllers/data_dumps_controller.rb b/app/controllers/data_dumps_controller.rb new file mode 100644 index 000000000..965c3ec3d --- /dev/null +++ b/app/controllers/data_dumps_controller.rb @@ -0,0 +1,121 @@ +class DataDumpsController < ApplicationController + + prepend_before_action :authenticate_user! + # load_and_authorize_resource + def index + authorize! :read, :read_data_dumps + sort = + case params[:sort] + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + when "start" + { start_date: { order: "asc" } } + when "-start" + { start_date: { order: "desc" } } + when "end" + { end_date: { order: "asc" } } + when "-end" + { end_date: { order: "desc"} } + else + { created_at: { order: "desc" } } + end + + page = page_from_params(params) + + response = DataDump.query( + page: page, + sort: sort, + scope: params[:scope] + ) + + begin + total = response.results.total + total_pages = page[:size].positive? ? (total.to_f / page[:size]).ceil : 0 + + data_dumps = response.results + + options = {} + options[:meta] = { + total: total, + "totalPages" => total_pages, + page: page[:number] + }.compact + + options[:links] = { + self: request.original_url, + next: + if data_dumps.blank? || page[:number] == total_pages + nil + else + request.base_url + "/data_dumps?" + + { "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact.to_query + end, + prev: + if page[:number] == 1 || page[:number] == 0 + nil + elsif data_dumps.blank? + # use the max page size + request.base_url + "/data_dumps?" + + { "page[number]" => total_pages, + "page[size]" => page[:size], + sort: params[:sort], + }.compact.to_query + else + request.base_url + "/data_dumps?" 
+ + { "page[number]" => page[:number] - 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact.to_query + end + }.compact + + render json: + DataDumpSerializer.new(data_dumps, options).serialized_json, status: :ok + + rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e + Raven.capture_exception(e) + + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) + + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request + end + end + + def show + authorize! :read, :read_data_dumps + data_dump = DataDump.where(uid: params[:id]).first + if data_dump.blank? || + ( + data_dump.aasm_state != "complete" + # TODO: Add conditional check for role here + ) + fail ActiveRecord::RecordNotFound + end + render json: DataDumpSerializer.new(data_dump).serialized_json, status: :ok + end + + def latest + authorize! :read, :read_data_dumps + data_dump = DataDump.where(scope: params[:scope], aasm_state: "complete").order(end_date: :desc).first + if data_dump.blank? || + ( + data_dump.aasm_state != "complete" + # TODO: Add conditional check for role here + ) + fail ActiveRecord::RecordNotFound + end + render json: DataDumpSerializer.new(data_dump).serialized_json, status: :ok + end +end diff --git a/app/models/ability.rb b/app/models/ability.rb index 36e5d8eb3..ab62ddc72 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -23,6 +23,7 @@ def initialize(user) can :export, :contacts can :export, :organizations can :export, :repositories + can :read, :read_data_dumps elsif user.role_id == "staff_user" can %i[read read_billing_information read_contact_information read_analytics], :all elsif user.role_id == "consortium_admin" && user.provider_id.present? 
diff --git a/app/models/data_dump.rb b/app/models/data_dump.rb new file mode 100644 index 000000000..6a9a9d74c --- /dev/null +++ b/app/models/data_dump.rb @@ -0,0 +1,122 @@ +# frozen_string_literal: true + +class DataDump < ApplicationRecord + include Elasticsearch::Model + + include Indexable + include AASM + + validates_presence_of :uid + validates_presence_of :scope + validates_presence_of :start_date + validates_presence_of :end_date + + validates_uniqueness_of :uid, message: "This Data Dump UID is already in use" + + validates_inclusion_of :scope, in: %w(metadata link), allow_blank: false + + aasm whiny_transitions: false do + # initial state should prevent public visibility + state :generating, initial: true + # we might add more here in the future depending on the granularity of status updates we wish to provide + # but for now, we have a state for when the dump is done and being transferred to S3 and one for when it is + # ready to be downloaded + state :storing, :complete + + event :store do + transitions from: :generating, to: :storing + end + + event :release do + transitions from: :storing, to: :complete + end + end + + if Rails.env.test? + index_name "data-dumps-test#{ENV['TEST_ENV_NUMBER']}" + elsif ENV["ES_PREFIX"].present? 
+ index_name "data-dumps-#{ENV['ES_PREFIX']}" + else + index_name "data-dumps" + end + + settings index: { + number_of_shards: 1, + analysis: { + analyzer: { + string_lowercase: { + tokenizer: "keyword", filter: %w[lowercase] + }, + }, + normalizer: { + keyword_lowercase: { type: "custom", filter: %w[lowercase] }, + }, + }, + } do + mapping dynamic: "false" do + indexes :id + indexes :uid, type: :text + indexes :scope, type: :keyword + indexes :description, type: :text + indexes :start_date, type: :date, format: :date_optional_time + indexes :end_date, type: :date, format: :date_optional_time + indexes :records, type: :integer + indexes :checksum, type: :text + indexes :file_path, type: :text + indexes :aasm_state, type: :keyword + indexes :created_at, type: :date, format: :date_optional_time, + fields: { + created_sort: { type: :date } + } + indexes :updated_at, type: :date, format: :date_optional_time, + fields: { + updated_sort: { type: :date } + } + end + end + + def self.query_aggregations + {} + end + + def self.query(options = {}) + + options[:page] ||= {} + options[:page][:number] ||= 1 + options[:page][:size] ||= 25 + + from = ((options.dig(:page, :number) || 1) - 1) * (options.dig(:page, :size) || 25) + sort = options[:sort] + + filter = [] + if options[:scope].present? 
+ filter << { term: { scope: options[:scope].downcase } } + end + + es_query = {bool: {filter: filter}} + + if options.fetch(:page, {}).key?(:cursor) + __elasticsearch__.search( + { + size: options.dig(:page, :size), + search_after: options.dig(:page, :cursor), + sort: sort, + query: es_query, + track_total_hits: true, + }.compact, + ) + else + __elasticsearch__.search( + { + size: options.dig(:page, :size), + from: from, + sort: sort, + query: es_query, + track_total_hits: true, + }.compact, + ) + end + + end + +end diff --git a/app/serializers/data_dump_serializer.rb b/app/serializers/data_dump_serializer.rb new file mode 100644 index 000000000..273c12eb7 --- /dev/null +++ b/app/serializers/data_dump_serializer.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +class DataDumpSerializer + include FastJsonapi::ObjectSerializer + set_key_transform :camel_lower + set_type "data-dump" + set_id :uid + + attributes :description, + :scope, + :start_date, + :end_date, + :records, + :checksum, + :download_link, + :created_at, + :updated_at + + attribute :download_link do |object| + "https://example.com/#{object.file_path}" + end +end \ No newline at end of file diff --git a/config/routes.rb b/config/routes.rb index 84da67052..f4277a1ea 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -230,6 +230,10 @@ resources :repository_prefixes, path: "repository-prefixes" resources :resource_types, path: "resource-types", only: %i[show index] + get "/data_dumps/:scope/latest", to: "data_dumps#latest", constraints: { scope: /(metadata|link)/ } + get "/data_dumps/:scope", to: "data_dumps#index", constraints: { scope: /(metadata|link)/ } + resources :data_dumps, constraints: { id: /[A-Za-z0-9_-]+/ }, only: %i[show index] + # custom routes for maintenance tasks post ":username", to: "datacite_dois#show", as: :user diff --git a/db/migrate/20230711130313_create_data_dumps.rb b/db/migrate/20230711130313_create_data_dumps.rb new file mode 100644 index 000000000..ee72893e3 --- /dev/null +++ 
b/db/migrate/20230711130313_create_data_dumps.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +class CreateDataDumps < ActiveRecord::Migration[6.1] + def change + create_table :data_dumps do |t| + t.string :uid, null: false + t.string :scope, null: false + t.text :description + t.datetime :start_date, null: false + t.datetime :end_date, null: false + t.bigint :records + t.string :checksum + t.string :file_path + t.string :aasm_state + + t.timestamps + + t.index %w[uid], { name: "index_data_dumps_on_uid", unique: true } + t.index %w[updated_at], name: "index_data_dumps_on_updated_at" + t.index %w[scope], name: "index_data_dumps_on_scope" + t.index %w[aasm_state], name: "index_data_dumps_on_aasm_state" + end + end +end diff --git a/db/schema.rb b/db/schema.rb index b266d86d6..b78ef0f86 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -12,7 +12,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 2023_01_23_122711) do +ActiveRecord::Schema.define(version: 2023_07_11_130313) do create_table "active_storage_attachments", charset: "utf8mb4", force: :cascade do |t| t.string "name", limit: 191, null: false t.string "record_type", null: false @@ -137,6 +137,24 @@ t.datetime "deleted_at" end + create_table "data_dumps", charset: "utf8mb4", force: :cascade do |t| + t.string "uid", null: false + t.string "scope", null: false + t.text "description" + t.datetime "start_date", null: false + t.datetime "end_date", null: false + t.bigint "records" + t.string "checksum" + t.string "file_path" + t.string "aasm_state" + t.datetime "created_at", precision: 6, null: false + t.datetime "updated_at", precision: 6, null: false + t.index ["aasm_state"], name: "index_data_dumps_on_aasm_state" + t.index ["scope"], name: "index_data_dumps_on_scope" + t.index ["uid"], name: "index_data_dumps_on_uid", unique: true + t.index ["updated_at"], name: "index_data_dumps_on_updated_at" + end + create_table "datacentre", 
charset: "utf8", force: :cascade do |t| t.text "comments", size: :long t.string "system_email", null: false diff --git a/spec/controllers/data_dumps_controller_spec.rb b/spec/controllers/data_dumps_controller_spec.rb new file mode 100644 index 000000000..7647b882f --- /dev/null +++ b/spec/controllers/data_dumps_controller_spec.rb @@ -0,0 +1,24 @@ +require 'rails_helper' + +RSpec.describe DataDumpsController, type: :controller do + + let(:token) { User.generate_token } + + describe "GET #index" do + it "returns http success" do + request.headers["Authorization"] = "Bearer " + token + get :index + expect(response).to have_http_status(:success) + end + end + + describe "GET #show" do + let(:data_dump) { create(:data_dump) } + it "returns http success" do + request.headers["Authorization"] = "Bearer " + token + get :show, params: { id: data_dump.uid } + expect(response).to have_http_status(:success) + end + end + +end diff --git a/spec/factories/data_dump.rb b/spec/factories/data_dump.rb new file mode 100644 index 000000000..9bf1e132f --- /dev/null +++ b/spec/factories/data_dump.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require "faker" + +FactoryBot.define do + factory :data_dump do + transient do + year { Faker::Number.within(2010..2021).to_s } + end + + uid { Faker::Internet.password(8).downcase } + scope { "metadata" } + description { "Test Metadata Data Dump Factory creation"} + start_date { "#{year}-01-01" } + end_date { "#{year}-12-31" } + records { Faker::Number.within(5_000_000..50_000_000) } + checksum { Faker::Crypto.sha256 } + created_at { Faker::Time.backward(1, :morning) } + updated_at { Faker::Time.backward(1, :evening) } + aasm_state { :complete } + end + + factory :data_dump_incomplete do + transient do + year { Faker::Number.within(2010..2021).to_s } + end + + uid { Faker::Internet.password(8).downcase } + scope { "metadata" } + description { "Test Metadata Data Dump Factory creation - incomplete"} + start_date { "#{year}-01-01" } + 
end_date { "#{year}-12-31" } + created_at { Faker::Time.backward(1, :morning) } + updated_at { Faker::Time.backward(1, :evening) } + aasm_state { :generating } + end +end \ No newline at end of file diff --git a/spec/models/data_dump_spec.rb b/spec/models/data_dump_spec.rb new file mode 100644 index 000000000..f84e27f8b --- /dev/null +++ b/spec/models/data_dump_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require "rails_helper" + +RSpec.describe DataDump, type: :model, elasticsearch: true do + describe "Validations" do + it { should validate_presence_of(:uid) } + it { should validate_presence_of(:scope) } + it { should validate_presence_of(:start_date) } + it { should validate_presence_of(:end_date) } + it { should validate_inclusion_of(:scope).in_array(%w(metadata link)) } + it { should allow_value("metadata").for(:scope).on(:create) } + it { should allow_value("link").for(:scope).on(:create) } + it { should_not allow_value("invalid").for(:scope).on(:create) } + end +end diff --git a/spec/requests/data_dumps_spec.rb b/spec/requests/data_dumps_spec.rb new file mode 100644 index 000000000..5949caf2b --- /dev/null +++ b/spec/requests/data_dumps_spec.rb @@ -0,0 +1,365 @@ +# frozen_string_literal: true + +require "rails_helper" + +describe DataDumpsController, type: :request, elasticsearch: true do + let(:data_dump) { create(:data_dump, uid: "test_dump") } + let(:token) { User.generate_token } + let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + token } } + let(:bad_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic 12345" } } + let(:user_token) { User.generate_token(role_id: "user") } + let(:user_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + user_token } } + + + describe "GET /data_dumps", elasticsearch: true do + let!(:data_dumps) { create_list(:data_dump, 10) } + + before do + DataDump.import + sleep 1 + end + context 
"with valid authorization" do + it "returns data dumps" do + get "/data_dumps", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(10) + expect(json.dig("meta", "total")).to eq(10) + end + + it "returns data dumps with pagination" do + get "/data_dumps?page[number]=1&page[size]=4", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(4) + expect(json.dig("meta", "total")).to eq(10) + next_link_absolute = Addressable::URI.parse(json.dig("links", "next")) + next_link = next_link_absolute.path + "?" + next_link_absolute.query + expect(next_link).to eq("/data_dumps?page%5Bnumber%5D=2&page%5Bsize%5D=4") + expect(json.dig("links", "prev")).to be_nil + + get next_link, nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(4) + expect(json.dig("meta", "total")).to eq(10) + next_link_absolute = Addressable::URI.parse(json.dig("links", "next")) + next_link = next_link_absolute.path + "?" + next_link_absolute.query + expect(next_link).to eq("/data_dumps?page%5Bnumber%5D=3&page%5Bsize%5D=4") + prev_link_absolute = Addressable::URI.parse(json.dig("links", "prev")) + prev_link = prev_link_absolute.path + "?" + prev_link_absolute.query + expect(prev_link).to eq("/data_dumps?page%5Bnumber%5D=1&page%5Bsize%5D=4") + + get next_link, nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(2) + expect(json.dig("meta", "total")).to eq(10) + expect(json.dig("links", "next")).to be_nil + prev_link_absolute = Addressable::URI.parse(json.dig("links", "prev")) + prev_link = prev_link_absolute.path + "?" 
+ prev_link_absolute.query + expect(prev_link).to eq("/data_dumps?page%5Bnumber%5D=2&page%5Bsize%5D=4") + end + + it "returns correct page links when results is exactly divisible by page size" do + get "/data_dumps?page[number]=1&page[size]=5", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(5) + expect(json.dig("meta", "total")).to eq(10) + expect(json.dig("links", "prev")).to be_nil + next_link_absolute = Addressable::URI.parse(json.dig("links", "next")) + next_link = next_link_absolute.path + "?" + next_link_absolute.query + expect(next_link).to eq("/data_dumps?page%5Bnumber%5D=2&page%5Bsize%5D=5") + + get next_link, nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(5) + expect(json.dig("meta", "total")).to eq(10) + expect(json.dig("links", "next")).to be_nil + prev_link_absolute = Addressable::URI.parse(json.dig("links", "prev")) + prev_link = prev_link_absolute.path + "?" + prev_link_absolute.query + expect(prev_link).to eq("/data_dumps?page%5Bnumber%5D=1&page%5Bsize%5D=5") + end + + it "returns a blank resultset when page is above max page" do + get "/data_dumps?page[number]=3&page[size]=5", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(0) + expect(json.dig("meta", "totalPages")).to eq(2) + expect(json.dig("meta", "page")).to eq(3) + expect(json.dig("links", "next")).to be_nil + prev_link_absolute = Addressable::URI.parse(json.dig("links", "prev")) + prev_link = prev_link_absolute.path + "?" 
+ prev_link_absolute.query + expect(prev_link).to eq("/data_dumps?page%5Bnumber%5D=2&page%5Bsize%5D=5") + end + end + + context "without authorization" do + it "returns access denied" do + get "/data_dumps" + expect(last_response.status).to eq(401) + end + end + + context "with bad authorization" do + it "returns access denied" do + get "/data_dumps", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + + context "with insufficient permission" do + it "returns access denied" do + get "/data_dumps", nil, user_headers + expect(last_response.status).to eq(403) + end + end + end + + describe "GET /data_dumps/:id" do + context "with valid authorization" do + context "when the record exists" do + it "returns the record" do + get "/data_dumps/#{data_dump.uid}", nil, headers + + expect(last_response.status).to eq(200) + expect(json.dig("data", "attributes", "description")).to eq("Test Metadata Data Dump Factory creation") + expect(json.dig("data", "attributes", "startDate")).to eq(data_dump.start_date.rfc3339(3)) + end + end + + context "when the record does not exist" do + it "returns status code 404" do + get "/data_dumps/invalid_id", nil, headers + + expect(last_response.status).to eq(404) + expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + end + end + end + + context "without authorization" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}" + expect(last_response.status).to eq(401) + end + end + + context "when the record does not exist" do + it "returns access denied" do + get "/data_dumps/invalid_id" + expect(last_response.status).to eq(401) + end + end + end + + context "with bad authorization" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + + context "when the record does not exist" do + 
it "returns access denied" do + get "/data_dumps/invalid_id", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + end + + context "with insufficient permission" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}", nil, user_headers + expect(last_response.status).to eq(403) + end + end + + context "when the record does not exist" do + it "returns access denied" do + get "/data_dumps/invalid_id", nil, user_headers + expect(last_response.status).to eq(403) + end + end + end + end + + describe "GET /data_dumps/:id" do + context "with valid authorization" do + context "when the record exists" do + it "returns the record" do + get "/data_dumps/#{data_dump.uid}", nil, headers + + expect(last_response.status).to eq(200) + expect(json.dig("data", "attributes", "description")).to eq("Test Metadata Data Dump Factory creation") + expect(json.dig("data", "attributes", "startDate")).to eq(data_dump.start_date.rfc3339(3)) + end + end + + context "when the record does not exist" do + it "returns status code 404" do + get "/data_dumps/invalid_id", nil, headers + + expect(last_response.status).to eq(404) + expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + end + end + end + + context "without authorization" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}" + expect(last_response.status).to eq(401) + end + end + + context "when the record does not exist" do + it "returns access denied" do + get "/data_dumps/invalid_id" + expect(last_response.status).to eq(401) + end + end + end + + context "with bad authorization" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + + context "when the record does not exist" do + it "returns access denied" do + get 
"/data_dumps/invalid_id", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + end + + context "with insufficient permission" do + context "when the record exists" do + it "returns access denied" do + get "/data_dumps/#{data_dump.uid}", nil, user_headers + expect(last_response.status).to eq(403) + end + end + + context "when the record does not exist" do + it "returns access denied" do + get "/data_dumps/invalid_id", nil, user_headers + expect(last_response.status).to eq(403) + end + end + end + end + + describe "GET /data_dumps/:scope", elasticsearch: true do + let!(:data_dumps) { create_list(:data_dump, 10) } + let!(:link_dumps) { create_list(:data_dump, 10, {scope: "link"}) } + + before do + DataDump.import + sleep 1 + end + + context "with valid authorization" do + it "returns metadata data dumps" do + get "/data_dumps/metadata", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(10) + expect(json.dig("meta", "total")).to eq(10) + end + + it "returns link data dumps" do + get "/data_dumps/link", nil, headers + + expect(last_response.status).to eq(200) + expect(json["data"].size).to eq(10) + expect(json.dig("meta", "total")).to eq(10) + end + end + + context "without authorization" do + it "returns access denied" do + get "/data_dumps/metadata" + expect(last_response.status).to eq(401) + end + end + + context "with bad authorization" do + it "returns access denied" do + get "/data_dumps/metadata", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + + context "with insufficient permission" do + it "returns access denied" do + get "/data_dumps/metadata", nil, user_headers + expect(last_response.status).to eq(403) + end + end + end + + describe "GET /data_dumps/:scope/latest", elasticsearch: true do + let!(:data_dumps) { create_list(:data_dump, 10) } + let!(:link_dumps) { create_list(:data_dump, 10, {scope: "link"}) } + let!(:latest_data) { create(:data_dump, uid: "latest_data", 
end_date:"2023-12-31")} + let!(:latest_link) { create(:data_dump, uid: "latest_link", scope: "link", end_date:"2023-12-31")} + before do + DataDump.import + sleep 1 + end + + context "with valid authorization" do + it "returns latest metadata data dump" do + get "/data_dumps/metadata/latest", nil, headers + + expect(last_response.status).to eq(200) + expect(json.dig("data", "id")).to eq("latest_data") + expect(json.dig("data", "attributes", "endDate")).to eq("2023-12-31T00:00:00.000Z") + expect(json.dig("data", "attributes", "startDate")).to eq(latest_data.start_date.rfc3339(3)) + end + + it "returns latest link data dump" do + get "/data_dumps/link/latest", nil, headers + + expect(last_response.status).to eq(200) + expect(json.dig("data", "id")).to eq("latest_link") + expect(json.dig("data", "attributes", "endDate")).to eq("2023-12-31T00:00:00.000Z") + expect(json.dig("data", "attributes", "startDate")).to eq(latest_link.start_date.rfc3339(3)) + end + end + + context "without authorization" do + it "returns access denied" do + get "/data_dumps/metadata/latest" + expect(last_response.status).to eq(401) + end + end + + context "with bad authorization" do + it "returns access denied" do + get "/data_dumps/metadata/latest", nil, bad_headers + expect(last_response.status).to eq(401) + end + end + + context "with insufficient permission" do + it "returns access denied" do + get "/data_dumps/metadata/latest", nil, user_headers + expect(last_response.status).to eq(403) + end + end + end + +end diff --git a/spec/support/elasticsearch_helper.rb b/spec/support/elasticsearch_helper.rb index f677494e9..f878f96dd 100644 --- a/spec/support/elasticsearch_helper.rb +++ b/spec/support/elasticsearch_helper.rb @@ -13,6 +13,7 @@ ProviderPrefix, Contact, ReferenceRepository, + DataDump, ].freeze RSpec.configure do |config|