From e81499aa9b3f58b5e5739f6d9b300a07c9066a99 Mon Sep 17 00:00:00 2001 From: git-hyagi <45576767+git-hyagi@users.noreply.github.com> Date: Mon, 6 Jan 2025 12:41:42 -0300 Subject: [PATCH] Update the tests for CONTENT_ORIGIN being optional --- .github/workflows/scripts/install.sh | 2 +- docs/admin/reference/settings.md | 12 +++++---- .../functional/api/test_x509_certguard.py | 11 ++++++-- pulp_file/pytest_plugin.py | 12 +++++++++ pulp_file/tests/functional/api/test_acs.py | 3 ++- .../tests/functional/api/test_auto_publish.py | 6 +++-- .../functional/api/test_crud_content_unit.py | 3 ++- .../tests/functional/api/test_domains.py | 6 +++-- .../functional/api/test_download_policies.py | 22 ++++++++-------- .../tests/functional/api/test_mime_types.py | 4 ++- ...0127_add_content_origin_to_upstreampulp.py | 18 +++++++++++++ pulpcore/app/models/publication.py | 7 +++++- pulpcore/app/models/replica.py | 2 ++ pulpcore/app/replica.py | 10 ++++++++ pulpcore/app/serializers/fields.py | 7 +++++- pulpcore/app/serializers/replica.py | 11 ++++++++ pulpcore/app/serializers/status.py | 3 +++ pulpcore/pytest_plugin.py | 5 +++- .../tests/functional/api/test_replication.py | 8 ++++++ pulpcore/tests/functional/api/test_status.py | 3 +-- .../api/using_plugin/test_content_access.py | 3 ++- .../api/using_plugin/test_content_cache.py | 25 +++++++++++-------- .../api/using_plugin/test_content_delivery.py | 18 ++++++++----- .../using_plugin/test_content_directory.py | 4 +-- .../api/using_plugin/test_content_path.py | 8 ++++-- .../using_plugin/test_content_promotion.py | 6 +++-- .../api/using_plugin/test_contentguard.py | 24 +++++++++++------- .../using_plugin/test_reclaim_disk_space.py | 16 +++++++----- .../api/using_plugin/test_repo_versions.py | 4 ++- template_config.yml | 1 + 30 files changed, 194 insertions(+), 70 deletions(-) create mode 100644 pulpcore/app/migrations/0127_add_content_origin_to_upstreampulp.py diff --git a/.github/workflows/scripts/install.sh b/.github/workflows/scripts/install.sh index e8bebd1f73..775fe328fa 100755 --- a/.github/workflows/scripts/install.sh +++ b/.github/workflows/scripts/install.sh @@ -119,7 +119,7 @@ if [ "$TEST" = "azure" ]; then - ./azurite:/etc/pulp\ command: "azurite-blob --blobHost 0.0.0.0"' vars/main.yaml sed -i -e '$a azure_test: true\ -pulp_scenario_settings: {"api_root_rewrite_header": "X-API-Root", "domain_enabled": true, "rest_framework__default_permission_classes": ["pulpcore.plugin.access_policy.DefaultAccessPolicy"]}\ +pulp_scenario_settings: {"api_root_rewrite_header": "X-API-Root", "content_origin": null, "domain_enabled": true, "rest_framework__default_permission_classes": ["pulpcore.plugin.access_policy.DefaultAccessPolicy"]}\ pulp_scenario_env: {}\ ' vars/main.yaml fi diff --git a/docs/admin/reference/settings.md b/docs/admin/reference/settings.md index a706800c77..93d7564861 100644 --- a/docs/admin/reference/settings.md +++ b/docs/admin/reference/settings.md @@ -1,6 +1,6 @@ # Settings -There is one required settings, although specific plugins may have additional required +There is one required setting, although specific plugins may have additional required settings. - `SECRET_KEY ` @@ -196,11 +196,13 @@ this setting only applies to uploads created after the change. ### CONTENT_ORIGIN -A string containing the protocol, fqdn, and port where the content app is reachable by -users. This is used by `pulpcore` and various plugins when referring users to the content app. +A string containing the protocol, fqdn, and port where the content app is reachable by users. 
+This is used by `pulpcore` and various plugins when referring users to the content app. For example if the API should refer users to content at using http to pulp.example.com on port -24816, (the content default port), you would set: `http://pulp.example.com:24816`. The default is -`None`. When set to `None`, the API returns relative URLs (without the protocol, fqdn, and port). +24816, (the content default port), you would set: `https://pulp.example.com:24816`. The default is `None`. +When set to `None`, the `base_url` for Distributions is a relative path. +This means the API returns relative URLs without the protocol, fqdn, and port. + ### HIDE_GUARDED_DISTRIBUTIONS diff --git a/pulp_certguard/tests/functional/api/test_x509_certguard.py b/pulp_certguard/tests/functional/api/test_x509_certguard.py index b190016456..06f4ae788f 100644 --- a/pulp_certguard/tests/functional/api/test_x509_certguard.py +++ b/pulp_certguard/tests/functional/api/test_x509_certguard.py @@ -77,11 +77,18 @@ def parameterized_cert(request): class TestX509CertGuard: """A test class to share the costly distribution setup.""" - def test_download(self, x509_guarded_distribution, parameterized_cert, content_path): + def test_download( + self, + x509_guarded_distribution, + parameterized_cert, + content_path, + file_distribution_base_url, + ): cert_data, status_code = parameterized_cert + distribution_base_url = file_distribution_base_url(x509_guarded_distribution.base_url) response = requests.get( - urljoin(x509_guarded_distribution.base_url, content_path), + urljoin(distribution_base_url, content_path), headers=cert_data and {"X-CLIENT-CERT": cert_data}, ) assert response.status_code == status_code diff --git a/pulp_file/pytest_plugin.py b/pulp_file/pytest_plugin.py index bd468d1ec6..303b02e5fe 100644 --- a/pulp_file/pytest_plugin.py +++ b/pulp_file/pytest_plugin.py @@ -380,3 +380,15 @@ def _generate_server_and_remote(*, manifest_path, policy): return server, remote yield _generate_server_and_remote + + +# if content_origin == None, base_url will return the relative path and +# we need to add the hostname to run the tests +@pytest.fixture +def file_distribution_base_url(bindings_cfg): + def _file_distribution_base_url(base_url): + if base_url.startswith("http"): + return base_url + return bindings_cfg.host + base_url + + return _file_distribution_base_url diff --git a/pulp_file/tests/functional/api/test_acs.py b/pulp_file/tests/functional/api/test_acs.py index b4150b3112..523c83c42c 100644 --- a/pulp_file/tests/functional/api/test_acs.py +++ b/pulp_file/tests/functional/api/test_acs.py @@ -193,6 +193,7 @@ def test_acs_sync_with_paths( def test_serving_acs_content( file_bindings, file_repo, + file_distribution_base_url, file_distribution_factory, basic_manifest_path, gen_object_with_cleanup, @@ -245,7 +246,7 @@ def test_serving_acs_content( # Download one of the files and assert that it has the right checksum and that it is downloaded # from the ACS server. 
content_unit = list(expected_files)[0] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(file_distribution_base_url(distribution.base_url), content_unit[0]) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() expected_checksum = content_unit[1] diff --git a/pulp_file/tests/functional/api/test_auto_publish.py b/pulp_file/tests/functional/api/test_auto_publish.py index 50ca0918f1..a8f998ee61 100644 --- a/pulp_file/tests/functional/api/test_auto_publish.py +++ b/pulp_file/tests/functional/api/test_auto_publish.py @@ -17,6 +17,7 @@ def file_repo_with_auto_publish(file_repository_factory): @pytest.mark.parallel def test_auto_publish_and_distribution( file_bindings, + file_distribution_base_url, file_repo_with_auto_publish, file_remote_ssl_factory, basic_manifest_path, @@ -32,6 +33,7 @@ def test_auto_publish_and_distribution( file_bindings.DistributionsFileApi, {"name": "foo", "base_path": "bar/foo", "repository": repo.pulp_href}, ) + distribution_base_url = file_distribution_base_url(distribution.base_url) # Assert that the repository is at version 0 and that there are no publications associated with # this Repository and that the distribution doesn't have a publication associated with it. @@ -67,7 +69,7 @@ def test_auto_publish_and_distribution( # Download the custom manifest files_in_first_publication = get_files_in_manifest( - "{}{}".format(distribution.base_url, publication.manifest) + "{}{}".format(distribution_base_url, publication.manifest) ) assert files_in_first_publication == expected_files @@ -80,7 +82,7 @@ def test_auto_publish_and_distribution( ) repo = file_bindings.RepositoriesFileApi.read(repo.pulp_href) files_in_second_publication = get_files_in_manifest( - "{}{}".format(distribution.base_url, publication.manifest) + "{}{}".format(distribution_base_url, publication.manifest) ) files_added = files_in_second_publication - files_in_first_publication assert repo.latest_version_href.endswith("/versions/2/") diff --git a/pulp_file/tests/functional/api/test_crud_content_unit.py b/pulp_file/tests/functional/api/test_crud_content_unit.py index 6d7b6c07af..487438613c 100644 --- a/pulp_file/tests/functional/api/test_crud_content_unit.py +++ b/pulp_file/tests/functional/api/test_crud_content_unit.py @@ -260,6 +260,7 @@ def test_create_file_from_url( file_bindings, file_repository_factory, file_remote_factory, + file_distribution_base_url, file_distribution_factory, basic_manifest_path, monitor_task, @@ -283,7 +284,7 @@ def test_create_file_from_url( # Test create w/ url for already existing content response = file_bindings.ContentFilesApi.create( - file_url=f"{distro.base_url}1.iso", + file_url=f"{file_distribution_base_url(distro.base_url)}1.iso", relative_path="1.iso", ) task = monitor_task(response.task) diff --git a/pulp_file/tests/functional/api/test_domains.py b/pulp_file/tests/functional/api/test_domains.py index d068339648..417eb66941 100644 --- a/pulp_file/tests/functional/api/test_domains.py +++ b/pulp_file/tests/functional/api/test_domains.py @@ -179,6 +179,7 @@ def test_content_promotion( file_bindings, basic_manifest_path, file_remote_factory, + file_distribution_base_url, file_distribution_factory, gen_object_with_cleanup, monitor_task, @@ -217,14 +218,15 @@ def test_content_promotion( # Distribute Task distro = file_distribution_factory(publication=pub.pulp_href, pulp_domain=domain.name) + distro_base_url = file_distribution_base_url(distro.base_url) assert 
distro.publication == pub.pulp_href # Url structure should be host/CONTENT_ORIGIN/DOMAIN_PATH/BASE_PATH - assert domain.name == distro.base_url.rstrip("/").split("/")[-2] + assert domain.name == distro_base_url.rstrip("/").split("/")[-2] # Check that content can be downloaded from base_url for path in ("1.iso", "2.iso", "3.iso"): - download = download_file(f"{distro.base_url}{path}") + download = download_file(f"{distro_base_url}{path}") assert download.response_obj.status == 200 assert len(download.body) == 1024 diff --git a/pulp_file/tests/functional/api/test_download_policies.py b/pulp_file/tests/functional/api/test_download_policies.py index eeaa5a1fb7..921bc31df6 100644 --- a/pulp_file/tests/functional/api/test_download_policies.py +++ b/pulp_file/tests/functional/api/test_download_policies.py @@ -45,6 +45,7 @@ def test_download_policy( pulpcore_bindings, file_bindings, file_repo, + file_distribution_base_url, file_remote_ssl_factory, range_header_manifest_path, gen_object_with_cleanup, @@ -100,11 +101,12 @@ def test_download_policy( "repository": file_repo.pulp_href, }, ) + distribution_base_url = file_distribution_base_url(distribution.base_url) # Assert that un-published content is not available for expected_file in expected_files: with pytest.raises(ClientResponseError) as exc: - content_unit_url = urljoin(distribution.base_url, expected_file[0]) + content_unit_url = urljoin(distribution_base_url, expected_file[0]) download_file(content_unit_url) assert exc.value.status == 404 @@ -127,7 +129,7 @@ def test_download_policy( assert process.returncode == 0 content_artifact_created_date = process.stdout.decode().strip() # Download the listing page for the 'foo' directory - distribution_html_page = download_file(f"{distribution.base_url}foo") + distribution_html_page = download_file(f"{distribution_base_url}foo") # Assert that requesting a path inside a distribution without a trailing / returns a 301 assert distribution_html_page.response_obj.history[0].status == 301 soup = BeautifulSoup(distribution_html_page.body, "html.parser") @@ -141,7 +143,7 @@ def test_download_policy( # Download one of the files and assert that it has the right checksum expected_files_list = list(expected_files) content_unit = expected_files_list[0] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() expected_checksum = content_unit[1] @@ -160,32 +162,32 @@ def test_download_policy( range_header = {"Range": "bytes=1048586-1049586"} num_bytes = 1001 content_unit = expected_files_list[1] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) # Assert proper download with range requests spanning multiple chunks of downloader range_header = {"Range": "bytes=1048176-2248576"} num_bytes = 1200401 content_unit = expected_files_list[2] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) # Assert that multiple requests with different Range header values work as expected range_header = {"Range": "bytes=1048176-2248576"} num_bytes = 1200401 content_unit = expected_files_list[3] - content_unit_url = 
urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) range_header = {"Range": "bytes=2042176-3248576"} num_bytes = 1206401 content_unit = expected_files_list[3] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) # Assert that range requests with a negative start value errors as expected content_unit = expected_files_list[4] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) # The S3 test API project doesn't handle invalid Range values correctly if settings.DEFAULT_FILE_STORAGE == "pulpcore.app.models.storage.FileSystem": with pytest.raises(ClientResponseError) as exc: @@ -195,7 +197,7 @@ def test_download_policy( # Assert that a range request with a start value larger than the content errors content_unit = expected_files_list[5] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) with pytest.raises(ClientResponseError) as exc: range_header = {"Range": "bytes=10485860-10485870"} download_file(content_unit_url, headers=range_header) @@ -205,7 +207,7 @@ def test_download_policy( range_header = {"Range": "bytes=4193804-4294304"} num_bytes = 500 content_unit = expected_files_list[6] - content_unit_url = urljoin(distribution.base_url, content_unit[0]) + content_unit_url = urljoin(distribution_base_url, content_unit[0]) _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) # Assert that artifacts were not downloaded if policy is not immediate diff --git a/pulp_file/tests/functional/api/test_mime_types.py b/pulp_file/tests/functional/api/test_mime_types.py index c84b6704fa..66ea461067 100644 --- a/pulp_file/tests/functional/api/test_mime_types.py +++ b/pulp_file/tests/functional/api/test_mime_types.py @@ -11,6 +11,7 @@ @pytest.mark.parallel def test_content_types( file_bindings, + file_distribution_base_url, file_repo_with_auto_publish, file_content_unit_with_name_factory, gen_object_with_cleanup, @@ -46,13 +47,14 @@ def test_content_types( repository=file_repo_with_auto_publish.pulp_href, ) distribution = gen_object_with_cleanup(file_bindings.DistributionsFileApi, data) + distribution_base_url = file_distribution_base_url(distribution.base_url) received_mimetypes = {} for extension, content_unit in files.items(): async def get_content_type(): async with aiohttp.ClientSession() as session: - url = urljoin(distribution.base_url, content_unit.relative_path) + url = urljoin(distribution_base_url, content_unit.relative_path) async with session.get(url) as response: return response.headers.get("Content-Type") diff --git a/pulpcore/app/migrations/0127_add_content_origin_to_upstreampulp.py b/pulpcore/app/migrations/0127_add_content_origin_to_upstreampulp.py new file mode 100644 index 0000000000..7d07755909 --- /dev/null +++ b/pulpcore/app/migrations/0127_add_content_origin_to_upstreampulp.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.16 on 2025-01-17 15:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0126_remoteartifact_failed_at'), + ] + + operations = [ + migrations.AddField( + model_name='upstreampulp', + 
name='content_origin', + field=models.TextField(null=True), + ), + ] diff --git a/pulpcore/app/models/publication.py b/pulpcore/app/models/publication.py index c19d5cf032..14b61ee3b6 100644 --- a/pulpcore/app/models/publication.py +++ b/pulpcore/app/models/publication.py @@ -728,7 +728,12 @@ def ensure_singleton(self): raise RuntimeError(f"This system already has a {cls.__name__}") def artifact_url(self, artifact): - origin = settings.CONTENT_ORIGIN.strip("/") + + # When CONTENT_ORIGIN == None we need to set origin as "/" so that the base_url will + # have the relative path like "/some/file/path", instead of "some/file/path" + origin = "/" + if settings.CONTENT_ORIGIN: + origin = settings.CONTENT_ORIGIN.strip("/") prefix = settings.CONTENT_PATH_PREFIX.strip("/") base_path = self.base_path.strip("/") if settings.DOMAIN_ENABLED: diff --git a/pulpcore/app/models/replica.py b/pulpcore/app/models/replica.py index 9012844738..ec63f0329a 100644 --- a/pulpcore/app/models/replica.py +++ b/pulpcore/app/models/replica.py @@ -32,6 +32,8 @@ class UpstreamPulp(BaseModel, AutoAddObjPermsMixin): last_replication = models.DateTimeField(null=True) + content_origin = models.TextField(null=True) + class Meta: unique_together = ("name", "pulp_domain") permissions = [ diff --git a/pulpcore/app/replica.py b/pulpcore/app/replica.py index 385ba394a9..3092b2839b 100644 --- a/pulpcore/app/replica.py +++ b/pulpcore/app/replica.py @@ -103,6 +103,16 @@ def create_or_update_remote(self, upstream_distribution): ): return None url = self.url(upstream_distribution) + if url.startswith("/"): + if self.server.content_origin: + url = self.server.content_origin + url + else: + raise Exception( + "UpstreamPulp needs to have content_origin defined because the downstream Pulp" + "does not know upstream's hostname from the distribution base_url (upstream" + "Pulp does not have the content_origin defined)." + ) + remote_fields_dict = {"url": url} remote_fields_dict.update(self.tls_settings) remote_fields_dict.update(self.remote_extra_fields(upstream_distribution)) diff --git a/pulpcore/app/serializers/fields.py b/pulpcore/app/serializers/fields.py index 6988c2646f..49aa34c8fe 100644 --- a/pulpcore/app/serializers/fields.py +++ b/pulpcore/app/serializers/fields.py @@ -311,7 +311,12 @@ class BaseURLField(serializers.CharField): """ def to_representation(self, value): - origin = settings.CONTENT_ORIGIN.strip("/") + + # When CONTENT_ORIGIN == None we need to set origin as "/" so that the base_url will + # have the relative path like "/some/file/path", instead of "some/file/path" + origin = "/" + if settings.CONTENT_ORIGIN: + origin = settings.CONTENT_ORIGIN.strip("/") prefix = settings.CONTENT_PATH_PREFIX.strip("/") base_path = value.base_path.strip("/") url = urljoin(origin, prefix + "/") diff --git a/pulpcore/app/serializers/replica.py b/pulpcore/app/serializers/replica.py index 614392d238..9700d44166 100644 --- a/pulpcore/app/serializers/replica.py +++ b/pulpcore/app/serializers/replica.py @@ -91,6 +91,16 @@ class UpstreamPulpSerializer(ModelSerializer, HiddenFieldsMixin): read_only=True, ) + content_origin = serializers.CharField( + help_text=_( + "Upstream server address. This field is used by downstream Pulp in case upstream Pulp" + "does not have content_origin set." 
+ ), + allow_null=True, + allow_blank=True, + required=False, + ) + def validate_q_select(self, value): """Ensure we have a valid q_select expression.""" from pulpcore.app.viewsets import DistributionFilter @@ -116,4 +126,5 @@ class Meta: "hidden_fields", "q_select", "last_replication", + "content_origin", ) diff --git a/pulpcore/app/serializers/status.py b/pulpcore/app/serializers/status.py index a1f73d6f06..76e8d1a151 100644 --- a/pulpcore/app/serializers/status.py +++ b/pulpcore/app/serializers/status.py @@ -72,6 +72,9 @@ class ContentSettingsSerializer(serializers.Serializer): content_origin = serializers.CharField( help_text=_("The CONTENT_ORIGIN setting for this Pulp instance"), + allow_blank=True, + allow_null=True, + required=False, ) content_path_prefix = serializers.CharField( help_text=_("The CONTENT_PATH_PREFIX setting for this Pulp instance"), diff --git a/pulpcore/pytest_plugin.py b/pulpcore/pytest_plugin.py index 813d85c913..859bb33b6a 100644 --- a/pulpcore/pytest_plugin.py +++ b/pulpcore/pytest_plugin.py @@ -773,8 +773,11 @@ def pulp_api_v3_url(bindings_cfg, pulp_api_v3_path): @pytest.fixture(scope="session") -def pulp_content_url(pulp_settings, pulp_domain_enabled): +def pulp_content_url(bindings_cfg, pulp_settings, pulp_domain_enabled): url = f"{pulp_settings.CONTENT_ORIGIN}{pulp_settings.CONTENT_PATH_PREFIX}" + if not pulp_settings.CONTENT_ORIGIN: + url = f"{bindings_cfg.host}{pulp_settings.CONTENT_PATH_PREFIX}" + if pulp_domain_enabled: url += "default/" return url diff --git a/pulpcore/tests/functional/api/test_replication.py b/pulpcore/tests/functional/api/test_replication.py index d72358fdd5..df462d0d93 100644 --- a/pulpcore/tests/functional/api/test_replication.py +++ b/pulpcore/tests/functional/api/test_replication.py @@ -34,6 +34,7 @@ def test_replication( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream_pulp = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, upstream_pulp_body, pulp_domain=non_default_domain.name @@ -95,6 +96,7 @@ def test_replication_idempotence( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream_pulp = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, upstream_pulp_body, pulp_domain=replica_domain.name @@ -130,6 +132,7 @@ def test_replication_idempotence( "domain": replica_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream_pulp = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, upstream_pulp_body, pulp_domain=source_domain.name @@ -178,6 +181,7 @@ def test_replication_with_wrong_ca_cert( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, "ca_cert": """-----BEGIN CERTIFICATE----- MIIDyDCCArCgAwIBAgIJALMhZGyJtHXTMA0GCSqGSIb3DQEBCwUAMIGgMQswCQYD VQQGEwJTRzEUMBIGA1UECAwLTmV3IFlvcmsxFTATBgNVBAcMDERlZmF1bHQgQ2l0 @@ -254,6 +258,7 @@ def test_replication_optimization( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream_pulp = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, upstream_pulp_body, pulp_domain=non_default_domain.name @@ -436,6 +441,7 @@ def test_replicate_rbac( "domain": "default", "username": bindings_cfg.username, "password": 
bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream_pulp = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, @@ -518,6 +524,7 @@ def test_replicate_with_basic_q_select( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, } upstream = gen_object_with_cleanup( pulpcore_bindings.UpstreamPulpsApi, upstream_body, pulp_domain=dest_domain.name @@ -576,6 +583,7 @@ def test_replicate_with_complex_q_select( "domain": source_domain.name, "username": bindings_cfg.username, "password": bindings_cfg.password, + "content_origin": bindings_cfg.host, "q_select": "pulp_label_select='upstream=1' OR pulp_label_select='upstream=2'", } upstream = gen_object_with_cleanup( diff --git a/pulpcore/tests/functional/api/test_status.py b/pulpcore/tests/functional/api/test_status.py index 861b431b95..1e39f47c88 100644 --- a/pulpcore/tests/functional/api/test_status.py +++ b/pulpcore/tests/functional/api/test_status.py @@ -42,7 +42,7 @@ "content_settings": { "type": "object", "properties": { - "content_origin": {"type": "string"}, + "content_origin": {"type": ["string", "null"]}, "content_path_prefix": {"type": "string"}, }, "required": ["content_origin", "content_path_prefix"], @@ -138,7 +138,6 @@ def verify_get_response(status, expected_schema): assert status["versions"] != [] assert status["content_settings"] is not None - assert status["content_settings"]["content_origin"] is not None assert status["content_settings"]["content_path_prefix"] is not None assert status["storage"]["used"] is not None diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_access.py b/pulpcore/tests/functional/api/using_plugin/test_content_access.py index 9029374865..8890af120f 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_content_access.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_access.py @@ -15,6 +15,7 @@ @pytest.mark.parallel def test_file_remote_on_demand( basic_manifest_path, + file_distribution_base_url, file_distribution_factory, file_fixtures_root, file_repo_with_auto_publish, @@ -39,7 +40,7 @@ def test_file_remote_on_demand( # Create a distribution from the publication distribution = file_distribution_factory(repository=repo.pulp_href) # attempt to download_file() a file - download_file(f"{distribution.base_url}/1.iso") + download_file(f"{file_distribution_base_url(distribution.base_url)}/1.iso") @pytest.mark.parallel diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_cache.py b/pulpcore/tests/functional/api/using_plugin/test_content_cache.py index 0d4fd80dec..fc579ef27b 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_content_cache.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_cache.py @@ -19,6 +19,7 @@ def test_full_workflow( basic_manifest_path, file_remote_factory, file_bindings, + file_distribution_base_url, file_distribution_factory, monitor_task, redis_status, @@ -49,11 +50,12 @@ def _check_cache(url): monitor_task(file_bindings.PublicationsFileApi.create(body).task).created_resources[0] ) distro = file_distribution_factory(repository=repo.pulp_href) + distro_base_url = file_distribution_base_url(distro.base_url) # Checks responses are cached for content files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] for i, file in enumerate(files): - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file # 
Check that removing the repository from the distribution invalidates the cache @@ -61,7 +63,7 @@ def _check_cache(url): monitor_task(file_bindings.DistributionsFileApi.partial_update(distro.pulp_href, body).task) files = ["", "PULP_MANIFEST", "1.iso"] for file in files: - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (404, None) == _check_cache(url), file # Check that responses are cacheable after a repository is added back @@ -69,15 +71,16 @@ def _check_cache(url): monitor_task(file_bindings.DistributionsFileApi.partial_update(distro.pulp_href, body).task) files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] for i, file in enumerate(files): - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file # Add a new distribution and check that its responses are cached separately distro2 = file_distribution_factory(repository=repo.pulp_href) + distro2_base_url = file_distribution_base_url(distro2.base_url) url = urljoin(pulp_content_url, f"{distro2.base_path}/") files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] for i, file in enumerate(files): - url = urljoin(distro2.base_url, file) + url = urljoin(distro2_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file # Test that updating a repository pointed by multiple distributions invalidates all @@ -89,25 +92,25 @@ def _check_cache(url): pub3 = file_bindings.PublicationsFileApi.read(response.created_resources[1]) files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] for i, file in enumerate(files): - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file - url = urljoin(distro2.base_url, file) + url = urljoin(distro2_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file # Tests that deleting one distribution sharing a repository only invalidates its cache monitor_task(file_bindings.DistributionsFileApi.delete(distro2.pulp_href).task) files = ["", "PULP_MANIFEST", "2.iso"] for file in files: - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT") == _check_cache(url), file - url = urljoin(distro2.base_url, file) + url = urljoin(distro2_base_url, file) assert (404, None) == _check_cache(url), file # Test that deleting a publication not being served doesn't invalidate cache file_bindings.PublicationsFileApi.delete(pub2.pulp_href) files = ["", "PULP_MANIFEST", "2.iso"] for file in files: - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT") == _check_cache(url), file # Test that deleting the serving publication does invalidate the cache""" @@ -115,14 +118,14 @@ def _check_cache(url): file_bindings.PublicationsFileApi.delete(pub3.pulp_href) files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] for i, file in enumerate(files): - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file # Tests that deleting a repository invalidates the cache""" monitor_task(file_bindings.RepositoriesFileApi.delete(repo.pulp_href).task) files = ["", "PULP_MANIFEST", "2.iso"] for file in files: - url = urljoin(distro.base_url, file) + url = urljoin(distro_base_url, file) assert (404, None) == _check_cache(url), file # Tests that accessing a file 
that doesn't exist on content app gives 404 diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_delivery.py b/pulpcore/tests/functional/api/using_plugin/test_content_delivery.py index 0ab11eaa2c..87004b0b3a 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_content_delivery.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_delivery.py @@ -14,6 +14,7 @@ @pytest.mark.parallel def test_delete_remote_on_demand( + file_distribution_base_url, file_repo_with_auto_publish, file_remote_ssl_factory, file_bindings, @@ -33,6 +34,7 @@ def test_delete_remote_on_demand( # Create a distribution pointing to the repository distribution = file_distribution_factory(repository=repo.pulp_href) + distribution_base_url = file_distribution_base_url(distribution.base_url) # Download the manifest from the remote expected_file_list = list(get_files_in_manifest(remote.url)) @@ -40,7 +42,7 @@ def test_delete_remote_on_demand( # Delete the remote and assert that downloading content returns a 404 monitor_task(file_bindings.RemotesFileApi.delete(remote.pulp_href).task) with pytest.raises(ClientResponseError) as exc: - url = urljoin(distribution.base_url, expected_file_list[0][0]) + url = urljoin(distribution_base_url, expected_file_list[0][0]) download_file(url) assert exc.value.status == 404 @@ -50,7 +52,7 @@ def test_delete_remote_on_demand( monitor_task(file_bindings.RepositoriesFileApi.sync(repo.pulp_href, body).task) # Assert that files can now be downloaded from the distribution - content_unit_url = urljoin(distribution.base_url, expected_file_list[0][0]) + content_unit_url = urljoin(distribution_base_url, expected_file_list[0][0]) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() expected_checksum = expected_file_list[0][1] @@ -59,6 +61,7 @@ def test_delete_remote_on_demand( @pytest.mark.parallel def test_remote_artifact_url_update( + file_distribution_base_url, file_repo_with_auto_publish, file_remote_ssl_factory, file_bindings, @@ -79,13 +82,14 @@ def test_remote_artifact_url_update( # Create a distribution from the publication distribution = file_distribution_factory(repository=repo.pulp_href) + distribution_base_url = file_distribution_base_url(distribution.base_url) # Download the manifest from the remote expected_file_list = list(get_files_in_manifest(remote.url)) # Assert that trying to download content raises a 404 with pytest.raises(ClientResponseError) as exc: - url = urljoin(distribution.base_url, expected_file_list[0][0]) + url = urljoin(distribution_base_url, expected_file_list[0][0]) download_file(url) assert exc.value.status == 404 @@ -97,7 +101,7 @@ def test_remote_artifact_url_update( monitor_task( file_bindings.RepositoriesFileApi.sync(file_repo_with_auto_publish.pulp_href, body).task ) - content_unit_url = urljoin(distribution.base_url, expected_file_list[0][0]) + content_unit_url = urljoin(distribution_base_url, expected_file_list[0][0]) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() expected_checksum = expected_file_list[0][1] @@ -107,6 +111,7 @@ def test_remote_artifact_url_update( @pytest.mark.parallel def test_remote_content_changed_with_on_demand( write_3_iso_file_fixture_data_factory, + file_distribution_base_url, file_repo_with_auto_publish, file_remote_ssl_factory, file_bindings, @@ -135,7 +140,7 @@ def test_remote_content_changed_with_on_demand( expected_file_list = list(get_files_in_manifest(remote.url)) 
write_3_iso_file_fixture_data_factory("basic", overwrite=True) - get_url = urljoin(distribution.base_url, expected_file_list[0][0]) + get_url = urljoin(file_distribution_base_url(distribution.base_url), expected_file_list[0][0]) # WHEN (first request) result = subprocess.run(["curl", "-v", get_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -156,6 +161,7 @@ def test_remote_content_changed_with_on_demand( @pytest.mark.parallel def test_handling_remote_artifact_on_demand_streaming_failure( write_3_iso_file_fixture_data_factory, + file_distribution_base_url, file_repo_with_auto_publish, file_remote_factory, file_bindings, @@ -215,7 +221,7 @@ def get_original_content_info(remote): return content_unit[0], content_unit[1] def download_from_distribution(content, distribution): - content_unit_url = urljoin(distribution.base_url, content_name) + content_unit_url = urljoin(file_distribution_base_url(distribution.base_url), content_name) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() return actual_checksum diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_directory.py b/pulpcore/tests/functional/api/using_plugin/test_content_directory.py index 414f222538..7b7f0efa5c 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_content_directory.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_directory.py @@ -18,6 +18,7 @@ def test_hidden_distros(file_distribution_factory, pulp_content_url, http_get): def test_zero_byte_file_listing( file_bindings, file_distribution_factory, + file_distribution_base_url, file_repo_with_auto_publish, random_artifact_factory, http_get, @@ -37,8 +38,7 @@ def test_zero_byte_file_listing( ).task monitor_task(task) distribution = file_distribution_factory(repository=file_repo_with_auto_publish.pulp_href) - - response = http_get(distribution.base_url) + response = http_get(file_distribution_base_url(distribution.base_url)) z_line = [i for i in response.decode("utf-8").split("\n") if i.startswith('')] assert len(z_line) == 1 assert z_line[0].endswith("0 Bytes") diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_path.py b/pulpcore/tests/functional/api/using_plugin/test_content_path.py index 71c96a83e1..a2533f3737 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_content_path.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_path.py @@ -11,6 +11,7 @@ @pytest.mark.parallel def test_content_directory_listing( + bindings_cfg, pulpcore_bindings, file_distribution_factory, gen_object_with_cleanup, @@ -35,8 +36,11 @@ def test_content_directory_listing( ]: file_distribution_factory(base_path=base_path + path, content_guard=content_guard) + content_origin = pulp_status.content_settings.content_origin + if not content_origin: + content_origin = bindings_cfg.host base_url = urljoin( - pulp_status.content_settings.content_origin, + content_origin, pulp_status.content_settings.content_path_prefix, ) if pulp_settings.DOMAIN_ENABLED: @@ -58,7 +62,7 @@ def test_content_directory_listing( # Assert that not using a trailing slash on the root returns a 301 base_url = urljoin( - pulp_status.content_settings.content_origin, + content_origin, pulp_status.content_settings.content_path_prefix, ) if pulp_settings.DOMAIN_ENABLED: diff --git a/pulpcore/tests/functional/api/using_plugin/test_content_promotion.py b/pulpcore/tests/functional/api/using_plugin/test_content_promotion.py index d118aa0d82..d9dbc9bcae 100644 --- 
a/pulpcore/tests/functional/api/using_plugin/test_content_promotion.py +++ b/pulpcore/tests/functional/api/using_plugin/test_content_promotion.py @@ -12,6 +12,7 @@ @pytest.mark.parallel def test_content_promotion( file_bindings, + file_distribution_base_url, file_repo_with_auto_publish, file_remote_ssl_factory, file_distribution_factory, @@ -43,12 +44,13 @@ def test_content_promotion( for distro in [distribution1, distribution2, distribution3]: # Assert that all 3 distributions can be accessed - r = get_from_url(distro.base_url) + distro_base_url = file_distribution_base_url(distro.base_url) + r = get_from_url(distro_base_url) assert r.status == 200 # Download one of the files from the distribution and assert it has the correct checksum expected_files_list = list(expected_files) content_unit = expected_files_list[0] - content_unit_url = urljoin(distro.base_url, content_unit[0]) + content_unit_url = urljoin(distro_base_url, content_unit[0]) downloaded_file = download_file(content_unit_url) actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() expected_checksum = content_unit[1] diff --git a/pulpcore/tests/functional/api/using_plugin/test_contentguard.py b/pulpcore/tests/functional/api/using_plugin/test_contentguard.py index b6d66d9b75..3fe17540ee 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_contentguard.py +++ b/pulpcore/tests/functional/api/using_plugin/test_contentguard.py @@ -14,6 +14,7 @@ def test_rbac_content_guard_full_workflow( pulpcore_bindings, file_bindings, + file_distribution_base_url, pulp_admin_user, anonymous_user, gen_user, @@ -44,7 +45,7 @@ def _assert_access(authorized_users): auth = BasicAuth(login=user.username, password=user.password) else: auth = None - response = get_from_url(distro.base_url, auth=auth) + response = get_from_url(file_distribution_base_url(distro.base_url), auth=auth) expected_status = 404 if user in authorized_users else 403 assert response.status == expected_status, f"Failed on {user.username=}" @@ -94,6 +95,7 @@ def _assert_access(authorized_users): def test_header_contentguard_workflow( pulpcore_bindings, file_bindings, + file_distribution_base_url, gen_user, file_distribution_factory, gen_object_with_cleanup, @@ -118,7 +120,8 @@ def test_header_contentguard_workflow( assert guard.pulp_href == distro.content_guard # Expect to receive a 403 Forbiden - response = get_from_url(distro.base_url, headers=None) + distro_base_url = file_distribution_base_url(distro.base_url) + response = get_from_url(distro_base_url, headers=None) assert response.status == 403 # Expect the status to be 404 given the distribution is accessible @@ -126,7 +129,7 @@ def test_header_contentguard_workflow( header_value = b64encode(b"123456").decode("ascii") headers = {"x-header": header_value} - response = get_from_url(distro.base_url, headers=headers) + response = get_from_url(distro_base_url, headers=headers) assert response.status == 404 # Check the access using an jq_filter @@ -160,7 +163,7 @@ def test_header_contentguard_workflow( header_value = b64encode(byte_header_content).decode("utf8") headers = {header_name: header_value} - response = get_from_url(distro.base_url, headers=headers) + response = get_from_url(file_distribution_base_url(distro.base_url), headers=headers) assert response.status == 404 @@ -245,6 +248,7 @@ def test_composite_contentguard_permissions( gen_user, gen_object_with_cleanup, monitor_task, + file_distribution_base_url, file_distribution_factory, ): # Create allowed-user @@ -277,18 +281,20 @@ def 
test_composite_contentguard_permissions( # Create "unattached" FileDistribution distro = file_distribution_factory() + distro_base_url = file_distribution_base_url(distro.base_url) # attempt access to base-url, expect 404 (no content, no guards) - response = get_from_url(distro.base_url) + response = get_from_url(distro_base_url) assert response.status == 404 # Assign CCG1, no guards body = PatchedfileFileDistribution(content_guard=ccg1.pulp_href) monitor_task(file_bindings.DistributionsFileApi.partial_update(distro.pulp_href, body).task) distro = file_bindings.DistributionsFileApi.read(distro.pulp_href) + distro_base_url = file_distribution_base_url(distro.base_url) assert ccg1.pulp_href == distro.content_guard # attempt access to base-url, expect 404 (no content, no guards allows) - response = get_from_url(distro.base_url) + response = get_from_url(distro_base_url) assert response.status == 404 # update CCG with RCG @@ -296,7 +302,7 @@ def test_composite_contentguard_permissions( ccg1 = pulpcore_bindings.ContentguardsCompositeApi.partial_update(ccg1.pulp_href, body) # attempt dist-access, expect 403 (1 guard, forbids) - response = get_from_url(distro.base_url) + response = get_from_url(distro_base_url) assert response.status == 403 # Create HeaderContentGuard, update CCG with [RCG, HCG] @@ -304,7 +310,7 @@ def test_composite_contentguard_permissions( pulpcore_bindings.ContentguardsCompositeApi.partial_update(ccg1.pulp_href, body) # attempt dist-access, expect 403 (2 guards, both forbid) - response = get_from_url(distro.base_url) + response = get_from_url(distro_base_url) assert response.status == 403 # examine error-response, expect one from each guard @@ -315,5 +321,5 @@ def test_composite_contentguard_permissions( # expect 404 for allowed-user (2 guards, one allows) header_value = b64encode(b"123456").decode("ascii") headers = {"x-header": header_value} - response = get_from_url(distro.base_url, headers=headers) + response = get_from_url(distro_base_url, headers=headers) assert response.status == 404 diff --git a/pulpcore/tests/functional/api/using_plugin/test_reclaim_disk_space.py b/pulpcore/tests/functional/api/using_plugin/test_reclaim_disk_space.py index a287e053e6..e645e1c1c7 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_reclaim_disk_space.py +++ b/pulpcore/tests/functional/api/using_plugin/test_reclaim_disk_space.py @@ -83,6 +83,7 @@ def _sync_repository_distribution(policy="immediate"): @pytest.mark.parallel def test_reclaim_on_demand_content( + file_distribution_base_url, pulpcore_bindings, sync_repository_distribution, monitor_task, @@ -93,8 +94,9 @@ def test_reclaim_on_demand_content( """ repo, remote, distribution = sync_repository_distribution(policy="on_demand") - content = get_files_in_manifest(urljoin(distribution.base_url, "PULP_MANIFEST")).pop() - download_file(urljoin(distribution.base_url, content[0])) + distribution_base_url = file_distribution_base_url(distribution.base_url) + content = get_files_in_manifest(urljoin(distribution_base_url, "PULP_MANIFEST")).pop() + download_file(urljoin(distribution_base_url, content[0])) expected_files = get_files_in_manifest(remote.url) artifact_sha256 = get_file_by_path(content[0], expected_files)[1] @@ -108,13 +110,14 @@ def test_reclaim_on_demand_content( assert 0 == pulpcore_bindings.ArtifactsApi.list(sha256=artifact_sha256).count - download_file(urljoin(distribution.base_url, content[0])) + download_file(urljoin(distribution_base_url, content[0])) assert 1 == 
pulpcore_bindings.ArtifactsApi.list(sha256=artifact_sha256).count @pytest.mark.parallel def test_immediate_reclaim_becomes_on_demand( + file_distribution_base_url, pulpcore_bindings, sync_repository_distribution, monitor_task, @@ -125,9 +128,10 @@ def test_immediate_reclaim_becomes_on_demand( artifacts_before_reclaim = pulpcore_bindings.ArtifactsApi.list().count assert artifacts_before_reclaim > 0 - content = get_files_in_manifest(urljoin(distribution.base_url, "PULP_MANIFEST")).pop() + distribution_base_url = file_distribution_base_url(distribution.base_url) + content = get_files_in_manifest(urljoin(distribution_base_url, "PULP_MANIFEST")).pop() # Populate cache - download_file(urljoin(distribution.base_url, content[0])) + download_file(urljoin(distribution_base_url, content[0])) reclaim_response = pulpcore_bindings.RepositoriesReclaimSpaceApi.reclaim( {"repo_hrefs": [repo.pulp_href]} @@ -138,7 +142,7 @@ def test_immediate_reclaim_becomes_on_demand( artifact_sha256 = get_file_by_path(content[0], expected_files)[1] assert 0 == pulpcore_bindings.ArtifactsApi.list(sha256=artifact_sha256).count - download_file(urljoin(distribution.base_url, content[0])) + download_file(urljoin(distribution_base_url, content[0])) assert 1 == pulpcore_bindings.ArtifactsApi.list(sha256=artifact_sha256).count diff --git a/pulpcore/tests/functional/api/using_plugin/test_repo_versions.py b/pulpcore/tests/functional/api/using_plugin/test_repo_versions.py index 115dcba355..74170f2686 100644 --- a/pulpcore/tests/functional/api/using_plugin/test_repo_versions.py +++ b/pulpcore/tests/functional/api/using_plugin/test_repo_versions.py @@ -793,6 +793,7 @@ def test_clear_all_units_repo_version( @pytest.mark.parallel def test_repo_version_retention( file_bindings, + file_distribution_base_url, file_repository_content, file_repository_factory, file_remote_ssl_factory, @@ -873,7 +874,8 @@ def test_repo_version_retention( # check that the last publication is distributed distro = file_distribution_factory(repository=repo.pulp_href) - manifest_files = get_files_in_manifest(f"{distro.base_url}PULP_MANIFEST") + distro_base_url = file_distribution_base_url(distro.base_url) + manifest_files = get_files_in_manifest(f"{distro_base_url}PULP_MANIFEST") assert len(manifest_files) == contents.count diff --git a/template_config.yml b/template_config.yml index fdc877497d..4747679d0d 100644 --- a/template_config.yml +++ b/template_config.yml @@ -58,6 +58,7 @@ pulp_settings: tmpfile_protection_time: 10 upload_protection_time: 10 pulp_settings_azure: + content_origin: null api_root_rewrite_header: X-API-Root domain_enabled: true rest_framework__default_permission_classes:
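
For reference, the origin handling this patch adds to BaseURLField.to_representation() and Publication.artifact_url() reduces to the standalone sketch below. It is illustrative only: the CONTENT_PATH_PREFIX default and the way the domain and base path are appended afterwards are assumptions, since the hunks above only show the origin-resolution lines.

    from urllib.parse import urljoin

    CONTENT_PATH_PREFIX = "/pulp/content/"  # assumed pulpcore default

    def distribution_base_url(content_origin, base_path, domain=None):
        # Mirrors the patch: with CONTENT_ORIGIN unset, start from "/" so the
        # result is a relative path like "/pulp/content/...", not "pulp/content/...".
        origin = content_origin.strip("/") if content_origin else "/"
        prefix = CONTENT_PATH_PREFIX.strip("/")
        url = urljoin(origin, prefix + "/")
        if domain:  # only relevant when DOMAIN_ENABLED is true
            url = urljoin(url, domain + "/")
        return urljoin(url, base_path.strip("/") + "/")

    assert distribution_base_url(None, "bar/foo") == "/pulp/content/bar/foo/"
    assert (
        distribution_base_url("https://pulp.example.com:24816", "bar/foo")
        == "https://pulp.example.com:24816/pulp/content/bar/foo/"
    )

The file_distribution_base_url test fixture introduced in pulp_file/pytest_plugin.py performs the inverse step: when the returned base_url does not start with "http", it prepends bindings_cfg.host so the functional tests can still download from the distribution.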
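Similarly, the replication fallback added in pulpcore/app/replica.py boils down to the check sketched here; remote_url and its arguments are hypothetical names used for illustration, not the actual Replicator API.

    def remote_url(upstream_base_url, upstream_pulp_content_origin=None):
        # Absolute URL: the upstream had CONTENT_ORIGIN set, so use it as-is.
        if not upstream_base_url.startswith("/"):
            return upstream_base_url
        # Relative URL: the downstream must learn the upstream's origin from
        # UpstreamPulp.content_origin, otherwise replication cannot proceed.
        if upstream_pulp_content_origin:
            return upstream_pulp_content_origin + upstream_base_url
        raise RuntimeError(
            "UpstreamPulp.content_origin must be set because the upstream Pulp "
            "serves relative distribution URLs (its CONTENT_ORIGIN is unset)."
        )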