From 536d645a286f2924ab75f2de0d833a316cf5b566 Mon Sep 17 00:00:00 2001 From: Renata Vaderna Date: Mon, 16 Dec 2024 18:49:40 +0100 Subject: [PATCH 01/26] Remove unused optional parameter from _yk_piv_ctrl (#572) * refact: remove pub_key_pem input from yubikey functions * refact: remove pub_key_pem from _yk_piv_ctrl * fix: fix setup test yubikey * docs: update changelog * chore: fix changelog * chore: bump yubikey-manager version * docs: remove public key pem from _yk_piv_ctrl docstring --- CHANGELOG.md | 5 ++- setup.py | 2 +- taf/api/yubikey.py | 4 +-- taf/repository_tool.py | 2 +- taf/tools/yubikey/yubikey_utils.py | 2 +- taf/yubikey.py | 55 +++++++++--------------------- 6 files changed, 23 insertions(+), 47 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d3d6cddfc..10acee0d6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,16 +9,16 @@ and this project adheres to [Semantic Versioning][semver]. ### Added +- Remove unused optional parameter from _yk_piv_ctrl ([572]) - Implement full partial update. Store last validated commit per repo ([559)]) ### Changed ### Fixed - +[572]: https://github.com/openlawlibrary/taf/pull/572 [559]: https://github.com/openlawlibrary/taf/pull/558 - ## [0.32.4] ### Added @@ -53,7 +53,6 @@ and this project adheres to [Semantic Versioning][semver]. 
### Fixed - [564]: https://github.com/openlawlibrary/taf/pull/564 ## [0.32.1] - 11/01/2024 diff --git a/setup.py b/setup.py index 20fd85bd7..a9ce5ac33 100644 --- a/setup.py +++ b/setup.py @@ -33,7 +33,7 @@ "jinja2==3.1.*", ] -yubikey_require = ["yubikey-manager==5.1.*"] +yubikey_require = ["yubikey-manager==5.5.*"] kwargs = { diff --git a/taf/api/yubikey.py b/taf/api/yubikey.py index 0d80a0a9a..9e3808378 100644 --- a/taf/api/yubikey.py +++ b/taf/api/yubikey.py @@ -161,7 +161,7 @@ def setup_signing_yubikey( on_exceptions=TAFError, reraise=True, ) -def setup_test_yubikey(key_path: str) -> None: +def setup_test_yubikey(key_path: str, key_size: Optional[int] = 2048) -> None: """ Reset the inserted yubikey, set default pin and copy the specified key to it. @@ -183,7 +183,7 @@ def setup_test_yubikey(key_path: str) -> None: print(f"Importing RSA private key from {key_path} to Yubikey...") pin = yk.DEFAULT_PIN - pub_key = yk.setup(pin, "Test Yubikey", private_key_pem=key_pem) + pub_key = yk.setup(pin, "Test Yubikey", private_key_pem=key_pem, key_size=key_size) print("\nPrivate key successfully imported.\n") print("\nPublic key (PEM): \n{}".format(pub_key.decode("utf-8"))) print("Pin: {}\n".format(pin)) diff --git a/taf/repository_tool.py b/taf/repository_tool.py index be69cf9e3..1f5a649a2 100644 --- a/taf/repository_tool.py +++ b/taf/repository_tool.py @@ -155,7 +155,7 @@ def _check_key_and_get_pin(expected_key_id): inserted_key = yk.get_piv_public_key_tuf() if expected_key_id != inserted_key["keyid"]: return None - serial_num = yk.get_serial_num(inserted_key) + serial_num = yk.get_serial_num() pin = yk.get_key_pin(serial_num) if pin is None: pin = yk.get_and_validate_pin(name) diff --git a/taf/tools/yubikey/yubikey_utils.py b/taf/tools/yubikey/yubikey_utils.py index b993e03f5..87bd79b9f 100644 --- a/taf/tools/yubikey/yubikey_utils.py +++ b/taf/tools/yubikey/yubikey_utils.py @@ -147,7 +147,7 @@ def __init__(self, keystore_path, scheme): @contextmanager -def 
_yk_piv_ctrl_mock(serial=None, pub_key_pem=None): +def _yk_piv_ctrl_mock(serial=None): global INSERTED_YUBIKEY if INSERTED_YUBIKEY is None: diff --git a/taf/yubikey.py b/taf/yubikey.py index 3630803ae..002f6a2c3 100644 --- a/taf/yubikey.py +++ b/taf/yubikey.py @@ -95,12 +95,11 @@ def decorator(*args, **kwargs): @contextmanager -def _yk_piv_ctrl(serial=None, pub_key_pem=None): +def _yk_piv_ctrl(serial=None): """Context manager to open connection and instantiate Piv Session. Args: - - pub_key_pem(str): Match Yubikey's public key (PEM) if multiple keys - are inserted + - serial (str): Match Yubikey's serial multiple keys are inserted Returns: - ykman.piv.PivSession @@ -110,35 +109,13 @@ def _yk_piv_ctrl(serial=None, pub_key_pem=None): """ # If pub_key_pem is given, iterate all devices, read x509 certs and try to match # public keys. - if pub_key_pem is not None: - for dev, info in list_all_devices(): - # Connect to a YubiKey over a SmartCardConnection, which is needed for PIV. + for dev, info in list_all_devices(): + if serial is None or info.serial == serial: with dev.open_connection(SmartCardConnection) as connection: session = PivSession(connection) - device_pub_key_pem = ( - session.get_certificate(SLOT.SIGNATURE) - .public_key() - .public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - ) - .decode("utf-8") - ) - # Tries to match without last newline char - if ( - device_pub_key_pem == pub_key_pem - or device_pub_key_pem[:-1] == pub_key_pem - ): - break yield session, info.serial - else: - for dev, info in list_all_devices(): - if serial is None or info.serial == serial: - with dev.open_connection(SmartCardConnection) as connection: - session = PivSession(connection) - yield session, info.serial - else: - pass + else: + pass def is_inserted(): @@ -178,7 +155,7 @@ def is_valid_pin(pin): @raise_yubikey_err("Cannot get serial number.") -def get_serial_num(pub_key_pem=None): +def get_serial_num(): """Get 
Yubikey serial number. Args: @@ -191,12 +168,12 @@ def get_serial_num(pub_key_pem=None): Raises: - YubikeyError """ - with _yk_piv_ctrl(pub_key_pem=pub_key_pem) as (_, serial): + with _yk_piv_ctrl() as (_, serial): return serial @raise_yubikey_err("Cannot export x509 certificate.") -def export_piv_x509(cert_format=serialization.Encoding.PEM, pub_key_pem=None): +def export_piv_x509(cert_format=serialization.Encoding.PEM): """Exports YubiKey's piv slot x509. Args: @@ -210,13 +187,13 @@ def export_piv_x509(cert_format=serialization.Encoding.PEM, pub_key_pem=None): Raises: - YubikeyError """ - with _yk_piv_ctrl(pub_key_pem=pub_key_pem) as (ctrl, _): + with _yk_piv_ctrl() as (ctrl, _): x509 = ctrl.get_certificate(SLOT.SIGNATURE) return x509.public_bytes(encoding=cert_format) @raise_yubikey_err("Cannot export public key.") -def export_piv_pub_key(pub_key_format=serialization.Encoding.PEM, pub_key_pem=None): +def export_piv_pub_key(pub_key_format=serialization.Encoding.PEM): """Exports YubiKey's piv slot public key. Args: @@ -230,7 +207,7 @@ def export_piv_pub_key(pub_key_format=serialization.Encoding.PEM, pub_key_pem=No Raises: - YubikeyError """ - with _yk_piv_ctrl(pub_key_pem=pub_key_pem) as (ctrl, _): + with _yk_piv_ctrl() as (ctrl, _): try: x509_cert = ctrl.get_certificate(SLOT.SIGNATURE) public_key = x509_cert.public_key() @@ -256,7 +233,7 @@ def export_yk_certificate(certs_dir, key): @raise_yubikey_err("Cannot get public key in TUF format.") -def get_piv_public_key_tuf(scheme=DEFAULT_RSA_SIGNATURE_SCHEME, pub_key_pem=None): +def get_piv_public_key_tuf(scheme=DEFAULT_RSA_SIGNATURE_SCHEME): """Return public key from a Yubikey in TUF's RSAKEY_SCHEMA format. 
Args: @@ -272,12 +249,12 @@ def get_piv_public_key_tuf(scheme=DEFAULT_RSA_SIGNATURE_SCHEME, pub_key_pem=None Raises: - YubikeyError """ - pub_key_pem = export_piv_pub_key(pub_key_pem=pub_key_pem).decode("utf-8") + pub_key_pem = export_piv_pub_key().decode("utf-8") return import_rsakey_from_pem(pub_key_pem, scheme) @raise_yubikey_err("Cannot sign data.") -def sign_piv_rsa_pkcs1v15(data, pin, pub_key_pem=None): +def sign_piv_rsa_pkcs1v15(data, pin): """Sign data with key from YubiKey's piv slot. Args: @@ -292,7 +269,7 @@ def sign_piv_rsa_pkcs1v15(data, pin, pub_key_pem=None): Raises: - YubikeyError """ - with _yk_piv_ctrl(pub_key_pem=pub_key_pem) as (ctrl, _): + with _yk_piv_ctrl() as (ctrl, _): ctrl.verify_pin(pin) return ctrl.sign( SLOT.SIGNATURE, KEY_TYPE.RSA2048, data, hashes.SHA256(), padding.PKCS1v15() From f3c3d6d34f051e005b660c5930d5a86d13b7ea57 Mon Sep 17 00:00:00 2001 From: Renata Vaderna Date: Mon, 16 Dec 2024 19:17:29 +0100 Subject: [PATCH 02/26] Add tests for get last remote commit and reset to commit (#573) * test: add tests for get last remote commit and reset to commit * chore: update changelog * test: fix failing test * docs: remove incorrect docstring --- CHANGELOG.md | 5 +++- taf/git.py | 17 ++++++++++---- taf/tests/test_git/conftest.py | 11 +++++++++ taf/tests/test_git/test_git.py | 42 ++++++++++++++++++++++++++++++++++ 4 files changed, 69 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 10acee0d6..7d28a0689 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning][semver]. ### Added +- Add tests for `get_last_remote_commit` and `reset_to_commit` ([573]) - Remove unused optional parameter from _yk_piv_ctrl ([572]) - Implement full partial update. Store last validated commit per repo ([559)]) @@ -16,8 +17,10 @@ and this project adheres to [Semantic Versioning][semver]. 
### Fixed +[573]: https://github.com/openlawlibrary/taf/pull/573 [572]: https://github.com/openlawlibrary/taf/pull/572 -[559]: https://github.com/openlawlibrary/taf/pull/558 +[559]: https://github.com/openlawlibrary/taf/pull/559 + ## [0.32.4] diff --git a/taf/git.py b/taf/git.py index f732598bd..02fff634d 100644 --- a/taf/git.py +++ b/taf/git.py @@ -1460,13 +1460,19 @@ def reset_num_of_commits( self._git(f"reset {flag} HEAD~{num_of_commits}") def reset_to_commit( - self, commit: str, branch: Optional[str] = None, hard: Optional[bool] = False + self, + commit: str, + branch: Optional[str] = None, + hard: Optional[bool] = False, + reset_remote_tracking=True, ) -> None: flag = "--hard" if hard else "--soft" - if branch is None: - branch = self.get_current_branch() - self.update_branch_refs(branch, commit) + if reset_remote_tracking: + if branch is None: + branch = self.get_current_branch() + self.update_branch_refs(branch, commit) + if hard: self._git(f"reset {flag} HEAD") @@ -1626,7 +1632,8 @@ def _determine_default_branch(self) -> Optional[str]: def top_commit_of_remote_branch(self, branch, remote="origin"): """ - Fetches the top commit of the specified remote branch. + Fetches the top commit of the specified remote tracking branch. 
+ """ remote_branch = f"{remote}/{branch}" if not self.branch_exists(remote_branch, include_remotes=True): diff --git a/taf/tests/test_git/conftest.py b/taf/tests/test_git/conftest.py index c328bf4c4..4839855d4 100644 --- a/taf/tests/test_git/conftest.py +++ b/taf/tests/test_git/conftest.py @@ -10,6 +10,7 @@ TEST_DIR = Path(TEST_DATA_REPOS_PATH, "test-git") REPO_NAME = "repository" CLONE_REPO_NAME = "repository2" +ORIGIN_REPO_NAME = "origin_repo" @fixture @@ -33,6 +34,16 @@ def repository(): shutil.rmtree(path, onerror=on_rm_error) +@fixture +def origin_repo(repository): + path = TEST_DIR / ORIGIN_REPO_NAME + repo = GitRepository(path=path) + repo.clone_from_disk(repository.path, is_bare=True) + yield repo + repo.cleanup() + shutil.rmtree(path, onerror=on_rm_error) + + @fixture def clone_repository(): path = TEST_DIR / CLONE_REPO_NAME diff --git a/taf/tests/test_git/test_git.py b/taf/tests/test_git/test_git.py index 0478e0b10..7e24eefec 100644 --- a/taf/tests/test_git/test_git.py +++ b/taf/tests/test_git/test_git.py @@ -51,3 +51,45 @@ def test_all_commits_since_commit_when_repo_empty(empty_repository): all_commits_empty = empty_repository.all_commits_since_commit() assert isinstance(all_commits_empty, list) assert len(all_commits_empty) == 0 + + +def test_get_last_remote_commit(origin_repo, clone_repository): + clone_repository.urls = [origin_repo.path] + clone_repository.clone() + clone_repository.commit_empty("test commit1") + top_commit = clone_repository.commit_empty("test commit2") + clone_repository.push() + initial_commit = clone_repository.initial_commit + clone_repository.reset_to_commit(initial_commit) + assert ( + clone_repository.top_commit_of_branch(clone_repository.default_branch) + == initial_commit + ) + last_remote_on_origin = clone_repository.get_last_remote_commit( + clone_repository.get_remote_url() + ) + assert last_remote_on_origin == top_commit + + +def test_reset_to_commit_when_reset_remote_tracking(origin_repo, clone_repository): + # reset 
to commit is also expected to update the remote tracking branch by default + clone_repository.urls = [origin_repo.path] + clone_repository.clone() + initial_commit = clone_repository.initial_commit + clone_repository.reset_to_commit(initial_commit) + assert ( + clone_repository.top_commit_of_remote_branch(clone_repository.default_branch) + == initial_commit + ) + + +def test_reset_to_commit_when_not_reset_remote_tracking(origin_repo, clone_repository): + clone_repository.urls = [origin_repo.path] + clone_repository.clone() + top_commit = clone_repository.head_commit_sha() + initial_commit = clone_repository.initial_commit + clone_repository.reset_to_commit(initial_commit, reset_remote_tracking=False) + assert ( + clone_repository.top_commit_of_remote_branch(clone_repository.default_branch) + == top_commit + ) From f23c2578e385964ad90671dc800efe33307dc9a8 Mon Sep 17 00:00:00 2001 From: Sasa Bojanic <48201593+sale3@users.noreply.github.com> Date: Mon, 16 Dec 2024 19:18:01 +0100 Subject: [PATCH 03/26] sbojanic/ci improve security (#567) * ci: replace tags with commit hashes * docs: add tag name to comment --- .github/workflows/ci.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 18b486244..63b74fde3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,10 +32,10 @@ jobs: matrix: python-version: ${{ fromJSON(needs.set_python_versions.outputs.all_versions) }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #@v5 with: python-version: ${{ matrix.python-version }} @@ -68,10 +68,10 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #@v4 - name: Set up Python - uses: 
actions/setup-python@v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #@v5 with: # build it on the minimum version python-version: ${{ fromJSON(needs.set_python_versions.outputs.all_versions)[0] }} @@ -117,14 +117,14 @@ jobs: steps: - name: Checkout Repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #@v4 - name: Get Upload URL id: get_upload_url run: echo "${{ github.event.release.upload_url }}" - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #@v5 with: # the newest python that we support python-version: ${{ needs.set_python_versions.outputs.last_version }} From 4490e9b7e24d64e27aa558876f8d6dbc4e80192c Mon Sep 17 00:00:00 2001 From: Renata Date: Thu, 19 Dec 2024 02:05:37 -0500 Subject: [PATCH 04/26] docs: add/update a number of metadata repository docstrings --- taf/api/roles.py | 4 +- .../tuf/test_create_edit_repo/test_update.py | 2 +- taf/tuf/keys.py | 4 - taf/tuf/repository.py | 135 ++++++++++++++---- 4 files changed, 114 insertions(+), 31 deletions(-) diff --git a/taf/api/roles.py b/taf/api/roles.py index a36819757..18bb676ba 100644 --- a/taf/api/roles.py +++ b/taf/api/roles.py @@ -135,7 +135,7 @@ def add_role( skip_prompt=skip_prompt, certs_dir=auth_repo.certs_dir, ) - auth_repo.create_delegated_role([new_role], signers) + auth_repo.create_delegated_roles([new_role], signers) auth_repo.add_new_roles_to_snapshot([new_role.name]) auth_repo.do_timestamp() @@ -300,7 +300,7 @@ def add_multiple_roles( ) all_signers.update(signers) - auth_repo.create_delegated_role(roles_to_add_data, all_signers) + auth_repo.create_delegated_roles(roles_to_add_data, all_signers) auth_repo.add_new_roles_to_snapshot(roles_to_add) auth_repo.do_timestamp() diff --git a/taf/tests/tuf/test_create_edit_repo/test_update.py b/taf/tests/tuf/test_create_edit_repo/test_update.py index f212b3432..88c49bdcd 100644 --- 
a/taf/tests/tuf/test_create_edit_repo/test_update.py +++ b/taf/tests/tuf/test_create_edit_repo/test_update.py @@ -55,7 +55,7 @@ def test_add_new_role(tuf_repo, signers): threshold=threshold, yubikey=False, ) - tuf_repo.create_delegated_role([new_role], role_signers) + tuf_repo.create_delegated_roles([new_role], role_signers) assert tuf_repo.targets().version == 2 assert role_name in tuf_repo.targets().delegations.roles new_role_obj = tuf_repo.open(role_name) diff --git a/taf/tuf/keys.py b/taf/tuf/keys.py index ffd1c969f..784102263 100644 --- a/taf/tuf/keys.py +++ b/taf/tuf/keys.py @@ -27,10 +27,6 @@ from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME -def create_signer(priv, pub): - return CryptoSigner(priv, _from_crypto(pub)) - - def generate_rsa_keypair(key_size=3072, password=None): # Generate private key private_key = rsa.generate_private_key( diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index ee090f4e2..4a5678fc7 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -66,10 +66,25 @@ def get_role_metadata_path(role: str) -> str: + """ + Arguments: + role: Name of a TUF role, main or delegated + + Return: + Path of the metadata file corresponding to the specified role, + relative to the repository's root + """ return f"{METADATA_DIRECTORY_NAME}/{role}.json" def get_target_path(target_name: str) -> str: + """ + Arguments: + target_name: Name of the target file expected to be inside the targets directory + + Return: + Path of the specified target file relative to the repository's root + """ return f"{TARGETS_DIRECTORY_NAME}/{target_name}" @@ -124,23 +139,44 @@ def __init__(self, path: Union[Path, str], *args, **kwargs) -> None: @property def metadata_path(self) -> Path: + """ + Full path of the metadata directory. + """ return self.path / METADATA_DIRECTORY_NAME @property def targets_path(self): + """ + Full path of the targets directory. 
+ """ return self.path / TARGETS_DIRECTORY_NAME @property def targets_infos(self) -> Dict[str, MetaFile]: - # tracks targets and root metadata changes, needed in `do_snapshot` + """ + Tracks targets and root metadata changes, needed in `do_snapshot` + """ return self._targets_infos @property def snapshot_info(self) -> MetaFile: - # tracks snapshot metadata changes, needed in `do_timestamp` + """ + Tracks snapshot metadata changes, needed in `do_timestamp` + """ return self._snapshot_info def calculate_hashes(self, md: Metadata, algorithms: List[str]) -> Dict: + """ + Calculate hashes of the specified signed metadata after serializing + it using the previously initialized serializer. + Hashes are computed for each specified algorithm. + + Arguments: + md: Signed metadata + algorithms: A list of hash algorithms (e.g., 'sha256', 'sha512'). + Return: + A dcitionary mapping algorithms and calculated hashes + """ hashes = {} data = md.to_bytes(serializer=self.serializer) for algo in algorithms: @@ -150,10 +186,16 @@ def calculate_hashes(self, md: Metadata, algorithms: List[str]) -> Dict: hashes[algo] = digest_object.hexdigest() return hashes - def calculate_length( - self, - md: Metadata, - ) -> int: + def calculate_length(self, md: Metadata) -> int: + """ + Calculate length of the specified signed metadata after serializing + it using the previously initialized serializer. + + Arguments: + md: Signed metadata + Return: + Langth of the signed metadata + """ data = md.to_bytes(serializer=self.serializer) return len(data) @@ -238,23 +280,28 @@ def _filter_if_can_be_added(roles): def add_target_files_to_role(self, added_data: Dict[str, Dict]) -> None: """Add target files to top-level targets metadata. - Args: - - added_data(dict): Dictionary of new data whose keys are target paths of repositories - (as specified in targets.json, relative to the targets dictionary). 
- The values are of form: - { - target: content of the target file - custom: { - custom_field1: custom_value1, - custom_field2: custom_value2 - } + + Arguments: + added_data(dict): Dictionary of new data whose keys are target paths of repositories + (as specified in targets.json, relative to the targets dictionary). + The values are of form: + { + target: content of the target file + custom: { + custom_field1: custom_value1, + custom_field2: custom_value2 } + } """ self.modify_targets(added_data=added_data) def add_path_to_delegated_role(self, role: str, paths: List[str]) -> bool: """ Add delegated paths to delegated role and return True if successful + + Arguments: + role: Name of a delegated target role + path: A list of paths to be appended to a list of the role's delegated pats """ if not self.check_if_role_exists(role): raise TAFError(f"Role {role} does not exist") @@ -271,6 +318,11 @@ def add_path_to_delegated_role(self, role: str, paths: List[str]) -> bool: return True def add_new_roles_to_snapshot(self, roles: List[str]): + """ + Add versions of newly created target roles to the snapshot. + Also update the versions of their parent roles, which are modified + when a new delegated role is added. + """ with self.edit(Snapshot.type) as sn: parents_of_roles = set() for role in roles: @@ -283,6 +335,11 @@ def add_new_roles_to_snapshot(self, roles: List[str]): ) def add_to_open_metadata(self, roles: List[str]): + """ + In order to execute several methods before updating the metadata on disk, + these methods need to be added to the _metadata_to_keep_open list. + This method adds all roles from the provided list to _metadata_to_keep_open. 
+ """ self._metadata_to_keep_open.update(roles) def open(self, role: str) -> Metadata: @@ -294,6 +351,10 @@ def open(self, role: str) -> Metadata: raise TAFError(f"Metadata file {path} does not exist") def check_if_keys_loaded(self, role_name: str) -> bool: + """ + Check if at least a threshold of signers of the specified role + has been added to the signer cache. + """ threshold = self.get_role_threshold(role_name) return ( role_name in self.signer_cache @@ -301,6 +362,9 @@ def check_if_keys_loaded(self, role_name: str) -> bool: ) def check_if_role_exists(self, role_name: str) -> bool: + """ + Given a name of a main or delegated target role, return True if it exist + """ role = self._role_obj(role_name) return role is not None @@ -352,6 +416,10 @@ def check_roles_expiration_dates( return expired_dict, will_expire_dict def _create_target_file(self, target_path, target_data): + """ + Writes the specified data to a target file and stores it on disk. + If the target data is a dictionary, the data is written in JSON format. + """ # if the target's parent directory should not be "targets", create # its parent directories if they do not exist target_dir = target_path.parent @@ -370,6 +438,9 @@ def _create_target_file(self, target_path, target_data): f.write(content) def clear_open_metadata(self): + """ + Removes everything from the _metadata_to_keep_open list + """ self._metadata_to_keep_open = set() def close(self, role: str, md: Metadata) -> None: @@ -422,7 +493,7 @@ def create( 2. Create initial versions of top-level metadata 3. Perform top-level delegation using keys from passed signers. - Args: + Arguments: roles_keys_data: an object containing information about roles, their threshold, delegations etc. 
signers: A dictionary, where dict-keys are role names and values are dictionaries, where-dict keys are keyids and values @@ -511,9 +582,21 @@ def create( signed.version = 0 # `close` will bump to initial valid verison 1 self.close(name, Metadata(signed)) - def create_delegated_role( + def create_delegated_roles( self, roles_data: List[TargetsRole], signers: Dict[str, List[CryptoSigner]] - ): + ) -> Tuple[List, List]: + """ + Create a new delegated roles, signes them using the provided signers and + updates their paren roles. + + Arguments: + roles_data (list): A list containing data about new roles. Each entry specifies + a role's name, path, threshold, and number of signing keys. + signers (dict): A dictionary that maps each new role to a list of its signers + + Return: + A list ofroles that were added and a list of roles that already existed + """ existing_roles = self.get_all_targets_roles() existing_roles.extend(MAIN_ROLES) existing_roles = [] @@ -564,6 +647,9 @@ def create_delegated_role( def _create_target_object( self, filesystem_path: str, target_path: str, custom: Optional[Dict] ): + """ + Creates a TUF target object, later used to update targets metadata + """ data = Path(filesystem_path).read_text().encode() target_file = TargetFile.from_data( target_file_path=target_path, @@ -585,11 +671,11 @@ def delete_unregistered_target_files(self, targets_role="targets"): if file_rel_path not in self.get_targets_of_role(targets_role): (self.targets_path / file_rel_path).unlink() - def find_delegated_roles_parent(self, delegated_role, parent=None): - if parent is None: - parent = "targets" - - parents = [parent] + def find_delegated_roles_parent(self, delegated_role: str) -> Optional[str]: + """ + Find parent role of the specified delegated targets role + """ + parents = ["targets"] while parents: parent = parents.pop() @@ -777,6 +863,7 @@ def get_role_paths(self, role, parent_role=None): Returns: Defined delegated paths of delegate target role or * in case of 
targets + (the main target is responsible for signing all target files that no delegated role should sign.) Raises: - securesystemslib.exceptions.FormatError: If the arguments are improperly formatted. From 81e079910f4c00962ea08cc1ca1cfde480450258 Mon Sep 17 00:00:00 2001 From: Renata Date: Thu, 19 Dec 2024 05:23:29 -0500 Subject: [PATCH 05/26] docs: add docstrings to metadata repository --- taf/tuf/repository.py | 134 ++++++++++++++++++++++++++++-------------- 1 file changed, 89 insertions(+), 45 deletions(-) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 4a5678fc7..f0a7dfb2e 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -203,7 +203,7 @@ def add_signers_to_cache(self, roles_signers: Dict): for role, signers in roles_signers.items(): self._load_role_signers(role, signers) - def all_target_files(self): + def all_target_files(self) -> Set: """ Return a set of relative paths of all files inside the targets directory @@ -317,7 +317,7 @@ def add_path_to_delegated_role(self, role: str, paths: List[str]) -> bool: parent.delegations.roles[role].paths.extend(paths) return True - def add_new_roles_to_snapshot(self, roles: List[str]): + def add_new_roles_to_snapshot(self, roles: List[str]) -> None: """ Add versions of newly created target roles to the snapshot. Also update the versions of their parent roles, which are modified @@ -334,7 +334,7 @@ def add_new_roles_to_snapshot(self, roles: List[str]): sn.meta[f"{parent_role}.json"].version + 1 ) - def add_to_open_metadata(self, roles: List[str]): + def add_to_open_metadata(self, roles: List[str]) -> None: """ In order to execute several methods before updating the metadata on disk, these methods need to be added to the _metadata_to_keep_open list. 
@@ -415,7 +415,7 @@ def check_roles_expiration_dates( return expired_dict, will_expire_dict - def _create_target_file(self, target_path, target_data): + def _create_target_file(self, target_path: Path, target_data: Union[str, Dict]) -> None: """ Writes the specified data to a target file and stores it on disk. If the target data is a dictionary, the data is written in JSON format. @@ -437,7 +437,7 @@ def _create_target_file(self, target_path, target_data): else: f.write(content) - def clear_open_metadata(self): + def clear_open_metadata(self) -> None: """ Removes everything from the _metadata_to_keep_open list """ @@ -486,7 +486,7 @@ def create( roles_keys_data: RolesKeysData, signers: dict, additional_verification_keys: Optional[dict] = None, - ): + ) -> None: """Create a new metadata repository on disk. 1. Create metadata subdir (fail, if exists) @@ -646,7 +646,7 @@ def create_delegated_roles( def _create_target_object( self, filesystem_path: str, target_path: str, custom: Optional[Dict] - ): + ) -> TargetFile: """ Creates a TUF target object, later used to update targets metadata """ @@ -686,8 +686,9 @@ def find_delegated_roles_parent(self, delegated_role: str) -> Optional[str]: return None def find_parents_of_roles(self, roles: List[str]): - # this could be optimized, but not that important - # if we only have a + """ + Find parents of all roles contained by the specified list of roles. 
+ """ parents = set() for role in roles: if role in MAIN_ROLES: @@ -699,27 +700,39 @@ def find_parents_of_roles(self, roles: List[str]): parents.add(parent) return parents - def get_delegations_of_role(self, role_name): + def get_delegations_of_role(self, role_name: str) -> List: + """ + Return a list of delegated roles of the specified target role + """ signed_obj = self.signed_obj(role_name) if signed_obj.delegations: return signed_obj.delegations.roles return [] - def get_keyids_of_role(self, role_name): + def get_keyids_of_role(self, role_name: str) -> List: + """ + Return all key ids of the specified role + """ role_obj = self._role_obj(role_name) return role_obj.keyids - def get_paths_of_role(self, role_name): + def get_paths_of_role(self, role_name: str) -> List: + """ + Return all delegated paths of the specified target role + """ parent = self.find_delegated_roles_parent(role_name) if parent: parent_obj = self.signed_obj(parent) return parent_obj.delegations.roles[role_name].paths return [] - def get_targets_of_role(self, role_name): + def get_targets_of_role(self, role_name: str): + """ + Return all targets of the specified target role + """ return self.signed_obj(role_name).targets - def find_keys_roles(self, public_keys, check_threshold=True): + def find_keys_roles(self, public_keys: List, check_threshold: Optional[bool]=True) -> List: """Find all roles that can be signed by the provided keys. 
A role can be signed by the list of keys if at least the number of keys that can sign that file is equal to or greater than the role's @@ -728,7 +741,7 @@ def find_keys_roles(self, public_keys, check_threshold=True): key_ids = [_get_legacy_keyid(public_key) for public_key in public_keys] return self.find_keysid_roles(key_ids=key_ids, check_threshold=check_threshold) - def find_keysid_roles(self, key_ids, check_threshold=True): + def find_keysid_roles(self, key_ids: List, check_threshold: Optional[bool]=True) -> List: """Find all roles that can be signed by the provided keys. A role can be signed by the list of keys if at least the number of keys that can sign that file is equal to or greater than the role's @@ -755,14 +768,14 @@ def find_keysid_roles(self, key_ids, check_threshold=True): return keys_roles - def find_associated_roles_of_key(self, public_key: SSlibKey): + def find_associated_roles_of_key(self, public_key: SSlibKey) -> List: """ Find all roles whose metadata files can be signed by this key Threshold is not important, as long as the key is one of the signing keys """ return self.find_keys_roles([public_key], check_threshold=False) - def get_all_roles(self): + def get_all_roles(self) -> List: """ Return a list of all defined roles, main roles combined with delegated targets roles """ @@ -770,7 +783,7 @@ def get_all_roles(self): all_roles = ["root", "snapshot", "timestamp"] + all_target_roles return all_roles - def get_all_targets_roles(self): + def get_all_targets_roles(self) -> List: """ Return a list containing names of all target roles """ @@ -784,7 +797,7 @@ def get_all_targets_roles(self): return all_roles - def get_all_target_files_state(self): + def get_all_target_files_state(self) -> List: """Create dictionaries of added/modified and removed files by comparing current file-system state with current signed targets (and delegations) metadata state. 
@@ -825,6 +838,9 @@ def get_all_target_files_state(self): return added_target_files, removed_target_files def get_expiration_date(self, role: str) -> datetime: + """ + Return expiration date of the specified role + """ meta_file = self.signed_obj(role) if meta_file is None: raise TAFError(f"Role {role} does not exist") @@ -852,22 +868,18 @@ def get_role_threshold(self, role: str, parent: Optional[str] = None) -> int: raise TAFError(f"Role {role} does not exist") return role_obj.threshold - def get_role_paths(self, role, parent_role=None): + def get_role_paths(self, role): """Get paths of the given role Args: - role(str): TUF role (root, targets, timestamp, snapshot or delegated one) - - parent_role(str): Name of the parent role of the delegated role. If not specified, - it will be set automatically, but this might be slow if there - are many delegations. Returns: Defined delegated paths of delegate target role or * in case of targets (the main target is responsible for signing all target files that no delegated role should sign.) Raises: - - securesystemslib.exceptions.FormatError: If the arguments are improperly formatted. - - securesystemslib.exceptions.UnknownRoleError: If 'rolename' has not been delegated by this + - TAFError if the role does not exist """ if role == "targets": return "*" @@ -876,7 +888,7 @@ def get_role_paths(self, role, parent_role=None): raise TAFError(f"Role {role} does not exist") return role.paths - def get_role_from_target_paths(self, target_paths): + def get_role_from_target_paths(self, target_paths: List) -> str: """ Find a common role that can be used to sign given target paths. @@ -899,7 +911,7 @@ def get_role_from_target_paths(self, target_paths): return common_role.pop() - def get_signable_metadata(self, role): + def get_signable_metadata(self, role: str): """Return signable portion of newly generate metadata for given role. 
Args: @@ -988,9 +1000,12 @@ def get_target_file_custom_data(self, target_path: str) -> Optional[Dict]: except KeyError: raise TAFError(f"Target {target_path} does not exist") - def get_target_file_hashes(self, target_path, hash_func=HASH_FUNCTION): + def get_target_file_hashes(self, target_path: str, hash_func: str=HASH_FUNCTION) -> str: """ - Return hashes of a given target path. + Return hashes of the given target path. + + Raises: + - TAFError if the target does not exist """ try: role = self.get_role_from_target_paths([target_path]) @@ -1004,7 +1019,11 @@ def get_target_file_hashes(self, target_path, hash_func=HASH_FUNCTION): except KeyError: raise TAFError(f"Target {target_path} does not exist") - def get_key_length_and_scheme_from_metadata(self, parent_role, keyid): + def get_key_length_and_scheme_from_metadata(self, parent_role: str, keyid: str) -> Tuple: + """ + Return length and signing scheme of the specified key id. + This data is specified in metadata files (root or a target role that has delegations) + """ try: metadata = json.loads( Path( @@ -1024,6 +1043,11 @@ def get_key_length_and_scheme_from_metadata(self, parent_role, keyid): return None, None def generate_roles_description(self) -> Dict: + """ + Generate a roles description dictionary, containing information + about each role, like its threshold, number of signing keys, delegations + if it is a target role, key scheme, key lengths. + """ roles_description = {} def _get_delegations(role_name): @@ -1068,22 +1092,15 @@ def _get_delegations(role_name): roles_description[role_name]["delegations"] = delegations_info return {"roles": roles_description} - def get_role_keys(self, role, parent_role=None): + def get_role_keys(self, role): """Get keyids of the given role Args: - role(str): TUF role (root, targets, timestamp, snapshot or delegated one) - - parent_role(str): Name of the parent role of the delegated role. 
If not specified, - it will be set automatically, but this might be slow if there - are many delegations. Returns: - List of the role's keyids (i.e., keyids of the keys). + List of the role's keyids (i.e., keyids of the keys). - Raises: - - securesystemslib.exceptions.FormatError: If the arguments are improperly formatted. - - securesystemslib.exceptions.UnknownRoleError: If 'rolename' has not been delegated by this - targets object. """ role_obj = self._role_obj(role) if role_obj is None: @@ -1122,7 +1139,7 @@ def is_valid_metadata_key( else: return key_id in self.get_keyids_of_role(role) - def is_valid_metadata_yubikey(self, role, public_key=None): + def is_valid_metadata_yubikey(self, role: str, public_key=None) -> bool: """Checks if metadata role contains key id from YubiKey. Args: @@ -1141,7 +1158,7 @@ def is_valid_metadata_yubikey(self, role, public_key=None): return self.is_valid_metadata_key(role, public_key) - def _load_role_signers(self, role: str, signers: List): + def _load_role_signers(self, role: str, signers: List) -> None: """Verify that the signers can be used to sign the specified role and add them to the signer cache @@ -1162,7 +1179,7 @@ def _load_role_signers(self, role: str, signers: List): raise InvalidKeyError(role) self.signer_cache[role][key.keyid] = signer - def map_signing_roles(self, target_filenames): + def map_signing_roles(self, target_filenames: List) -> Dict: """ For each target file, find delegated role responsible for that target file based on the delegated paths. The most specific role (meaning most deeply nested) whose @@ -1193,7 +1210,7 @@ def map_signing_roles(self, target_filenames): return roles_targets - def modify_targets(self, added_data=None, removed_data=None): + def modify_targets(self, added_data: Optional[Dict]=None, removed_data: Optional[Dict]=None) -> None: """Creates a target.json file containing a repository's commit for each repository. Adds those files to the tuf repository. 
@@ -1378,7 +1395,10 @@ def roles_targets_for_filenames(self, target_filenames): roles_targets_mapping.setdefault(role_name, []).append(target_filename) return roles_targets_mapping - def _role_obj(self, role, parent=None): + def _role_obj(self, role: str, parent: Optional[str]=None): + """ + Return TUF's role object for the specified role + """ if role in MAIN_ROLES: md = self.open("root") try: @@ -1401,6 +1421,9 @@ def _role_obj(self, role, parent=None): return None def signed_obj(self, role: str): + """ + Return TUF's signed object for the specified role + """ md = self.open(role) return self._signed_obj(role, md) @@ -1421,6 +1444,9 @@ def _signed_obj(self, role: str, md=None): raise TAFError(f"Invalid metadata file {role}.json") def _set_default_expiration_date(self, signed: Signed) -> None: + """ + Update expiration dates of the specified signed object + """ interval = self.expiration_intervals.get(signed.type, 90) start_date = datetime.now(timezone.utc) expiration_date = start_date + timedelta(days=interval) @@ -1469,6 +1495,9 @@ def set_metadata_expiration_date( role.expires = expiration_date def sort_roles_targets_for_filenames(self): + """ + Group target files per target roles + """ rel_paths = [] for filepath in self.targets_path.rglob("*"): if filepath.is_file(): @@ -1484,6 +1513,10 @@ def sort_roles_targets_for_filenames(self): return roles_targets def update_target_role(self, role: str, target_paths: Dict): + """ + Update the specified target role by adding or removing + target files and target objects for the specified target paths + """ if not self.check_if_role_exists(role): raise TAFError(f"Role {role} does not exist") self.verify_signers_loaded([role]) @@ -1503,12 +1536,19 @@ def update_target_role(self, role: str, target_paths: Dict): self._modify_tarets_role(target_files, removed_paths, role) - def update_snapshot_and_timestamp(self, force=True): + def update_snapshot_and_timestamp(self, force: Optional[bool]=True): + """ + Update timestamp and 
snapshot roles. If force is true, update them + even if their content was not modified + """ self.verify_signers_loaded(["snapshot", "timestamp"]) self.do_snapshot(force=force) self.do_timestamp(force=force) def verify_roles_exist(self, roles: List[str]): + """ + Check if the specified roles exist and raise an error if an least one does not exist + """ non_existant_roles = [] for role in roles: if not self.check_if_role_exists(role): @@ -1517,6 +1557,10 @@ def verify_roles_exist(self, roles: List[str]): raise TAFError(f"Role(s) {', '.join(non_existant_roles)} do not exist") def verify_signers_loaded(self, roles: List[str]): + """ + Verify that the signers associated with the specified keys were added to the signer cache. + Raise an error if that is not the case + """ not_loaded = [role for role in roles if role not in self.signer_cache] if len(not_loaded): raise SignersNotLoaded(roles=not_loaded) From 78a91b52ddc990bad43a80aa0ec297d20b4f7109 Mon Sep 17 00:00:00 2001 From: Renata Date: Fri, 20 Dec 2024 18:28:21 -0500 Subject: [PATCH 06/26] docs: update docstrings, comments and docs --- docs/developers/repository-classes.md | 4 +- taf/api/api_workflow.py | 22 +++++++++++ taf/api/roles.py | 6 +-- taf/auth_repo.py | 38 +------------------ taf/tests/conftest.py | 1 - .../test_repositoriesdb.py | 2 +- .../tuf/test_create_edit_repo/test_update.py | 6 +-- taf/tests/tuf/test_query_repo/conftest.py | 2 - .../tuf/test_query_repo/test_query_repo.py | 10 ++--- taf/tuf/keys.py | 30 +++++++-------- taf/tuf/repository.py | 4 +- taf/tuf/storage.py | 13 ++++++- 12 files changed, 65 insertions(+), 73 deletions(-) diff --git a/docs/developers/repository-classes.md b/docs/developers/repository-classes.md index b5c6bc49c..cd074210d 100644 --- a/docs/developers/repository-classes.md +++ b/docs/developers/repository-classes.md @@ -87,7 +87,9 @@ regardless of whether there are `metadata` files located at `path/metadata`. 
In metadata and target files from mediums other than the local file system. TUF enables such flexibility by allowing custom implementations of the `StorageBackendInterface`. These implementations can redefine how metadata and target files are read and written. To instantiate a `MetadataRepository` class with a custom storage interface, use the -`storage` keyword argument. If not specified, TUF's default `FilesystemBackend` will be used. +`storage` keyword argument. If not specified, TUF's default `FilesystemBackend` will be used. The other available +option is `GitStorageBackend`. This implementation loads data from a specific commit if the commit is specified, +or from the filesystem if the commit is `None`, by extending `FilesystemBackend`. This class is used extensively to implement API functions. diff --git a/taf/api/api_workflow.py b/taf/api/api_workflow.py index 4c542999b..a3208835e 100644 --- a/taf/api/api_workflow.py +++ b/taf/api/api_workflow.py @@ -41,6 +41,28 @@ def manage_repo_and_signers( commit_msg: Optional[str] = None, no_commit_warning: Optional[bool] = True, ): + """ + A context manager that loads all signers and adds them to the specified authentication repository's + signers cache. This allows for the execution of other methods without having to update the + signers cache manually. Optionally, at the end, the context manager commits and pushes all changes made + to the authentication repository and handles cleanup in case of an error. + + Arguments: + auth_repo (AuthenticationRepository): Already instantiated authentication repository. + roles (Optional[List[str]]): List of roles that are expected to be updated. + keystore (Optional[Union[str, Path]]): Path to the keystore containing signing keys. + scheme (Optional[str]): The signature scheme. + prompt_for_keys (Optional[bool]): If True, prompts for keys if not found. Defaults to False. + paths_to_reset_on_error (Optional[List[Union[str, Path]]]): Paths to reset if an error occurs. 
+ load_roles (Optional[bool]): If True, loads signing keys of the roles specified using the argument of the same name. + load_parents (Optional[bool]): If true, loads signing keys of the specified roles' parents. + load_snapshot_and_timestamp (Optional[bool]): If True, loads snapshot and timestamp signing keys. + commit (Optional[bool]): If True, commits changes to the repository. + push (Optional[bool]): If True, pushes changes to the remote repository. + commit_key (Optional[str]): Commit key from `messages.py` + commit_msg (Optional[str]): The message to use for commits. + no_commit_warning (Optional[bool]): If True, suppresses warnings when not committing. + """ try: roles_to_load = set() if roles: diff --git a/taf/api/roles.py b/taf/api/roles.py index 18bb676ba..f39e6bb43 100644 --- a/taf/api/roles.py +++ b/taf/api/roles.py @@ -449,16 +449,16 @@ def revoke_signing_key( ( removed_from_roles, not_added_roles, - less_than_threshold_roless, + less_than_threshold_roles, ) = auth_repo.revoke_metadata_key(key_id=key_id, roles=roles) if not_added_roles: taf_logger.log( "NOTICE", f"Key is not a signing key of role(s) {', '.join(not_added_roles)}", ) - if less_than_threshold_roless: + if less_than_threshold_roles: taf_logger.warning( - f"Cannot remove key from {', '.join(less_than_threshold_roless)}. Number of keys must be greater or equal to thresholds" + f"Cannot remove key from {', '.join(less_than_threshold_roles)}. Number of keys must be greater or equal to thresholds" ) if len(removed_from_roles): diff --git a/taf/auth_repo.py b/taf/auth_repo.py index cc4a4c823..54842cc55 100644 --- a/taf/auth_repo.py +++ b/taf/auth_repo.py @@ -223,38 +223,6 @@ def commit_and_push( "Default branch is None, skipping last_validated_commit update." 
) - def get_role_repositories(self, role, parent_role=None): - """Get repositories of the given role - - Args: - - role(str): TUF role (root, targets, timestamp, snapshot or delegated one) - - parent_role(str): Name of the parent role of the delegated role. If not specified, - it will be set automatically, but this might be slow if there - are many delegations. - - Returns: - Repositories' path from repositories.json that matches given role paths - - Raises: - - securesystemslib.exceptions.FormatError: If the arguments are improperly formatted. - - securesystemslib.exceptions.UnknownRoleError: If 'rolename' has not been delegated by this - """ - role_paths = self.get_role_paths(role, parent_role=parent_role) - - target_repositories = self._get_target_repositories() - return [ - repo - for repo in target_repositories - if any([fnmatch(repo, path) for path in role_paths]) - ] - - def _get_target_repositories(self): - repositories_path = self.targets_path / "repositories.json" - if repositories_path.exists(): - repositories = repositories_path.read_text() - repositories = json.loads(repositories)["repositories"] - return [str(Path(target_path).as_posix()) for target_path in repositories] - def get_target(self, target_name, commit=None, safely=True) -> Optional[Dict]: if commit is None: commit = self.head_commit_sha() @@ -309,10 +277,8 @@ def is_commit_authenticated(self, target_name: str, commit: str) -> bool: @contextmanager def repository_at_revision(self, commit: str): """ - Context manager which makes sure that TUF repository is instantiated - using metadata files at the specified revision. Creates a temp directory - and metadata files inside it. Deleted the temp directory when no longer - needed. + Context manager that enables reading metadata from an older commit. + This should be used in combination with the Git storage backend. 
""" self._storage.commit = commit yield diff --git a/taf/tests/conftest.py b/taf/tests/conftest.py index 764ee3648..3b3cc7bb9 100644 --- a/taf/tests/conftest.py +++ b/taf/tests/conftest.py @@ -127,7 +127,6 @@ def repo_path(request, repo_dir): full_path = repo_dir / test_name full_path.mkdir(parents=True) - # Convert to string if necessary, or use it as a Path object yield full_path shutil.rmtree(full_path, onerror=on_rm_error) diff --git a/taf/tests/test_repositoriesdb/test_repositoriesdb.py b/taf/tests/test_repositoriesdb/test_repositoriesdb.py index db220a396..ec8a47476 100644 --- a/taf/tests/test_repositoriesdb/test_repositoriesdb.py +++ b/taf/tests/test_repositoriesdb/test_repositoriesdb.py @@ -102,7 +102,7 @@ def _check_repositories_dict( if roles is not None and len(roles): only_load_targets = True if only_load_targets: - target_files_of_roles = auth_repo.get_singed_target_files_of_roles(roles) + target_files_of_roles = auth_repo.get_signed_target_files_of_roles(roles) for commit in commits: repositories_json = auth_repo.get_json( commit, repositoriesdb.REPOSITORIES_JSON_PATH diff --git a/taf/tests/tuf/test_create_edit_repo/test_update.py b/taf/tests/tuf/test_create_edit_repo/test_update.py index 88c49bdcd..a1cd2efe1 100644 --- a/taf/tests/tuf/test_create_edit_repo/test_update.py +++ b/taf/tests/tuf/test_create_edit_repo/test_update.py @@ -69,15 +69,15 @@ def test_add_new_role(tuf_repo, signers): def test_remove_delegated_paths(tuf_repo): - paths_to_remvoe = ["dir2/path1"] - tuf_repo.remove_delegated_paths({"delegated_role": paths_to_remvoe}) + paths_to_remove = ["dir2/path1"] + tuf_repo.remove_delegated_paths({"delegated_role": paths_to_remove }) assert tuf_repo.root().version == 1 assert tuf_repo.targets().version == 2 assert tuf_repo.timestamp().version == 1 assert tuf_repo.snapshot().version == 1 - for path in paths_to_remvoe: + for path in paths_to_remove : assert ( path not in tuf_repo.get_delegations_of_role("targets")["delegated_role"].paths diff 
--git a/taf/tests/tuf/test_query_repo/conftest.py b/taf/tests/tuf/test_query_repo/conftest.py index e19fef959..8bfd3b849 100644 --- a/taf/tests/tuf/test_query_repo/conftest.py +++ b/taf/tests/tuf/test_query_repo/conftest.py @@ -44,7 +44,6 @@ def tuf_repo_with_delegations( custom1 = {"custom_attr1": "custom_val1"} custom2 = {"custom_attr2": "custom_val2"} - "delegated role's targets" tuf_repo.add_target_files_to_role( { delegated_path1: {"target": "test1", "custom": custom1}, @@ -52,7 +51,6 @@ def tuf_repo_with_delegations( } ) - "inner delegated role's targets" path_delegated = "dir2/path2" tuf_repo.add_target_files_to_role( { diff --git a/taf/tests/tuf/test_query_repo/test_query_repo.py b/taf/tests/tuf/test_query_repo/test_query_repo.py index 18d5f773c..749363027 100644 --- a/taf/tests/tuf/test_query_repo/test_query_repo.py +++ b/taf/tests/tuf/test_query_repo/test_query_repo.py @@ -163,14 +163,14 @@ def test_all_target_files(tuf_repo_with_delegations): assert actual == {"test2", "test1", "dir2/path2", "dir1/path1", "dir2/path1"} -def test_get_singed_target_files_of_roles(tuf_repo_with_delegations): - actual = tuf_repo_with_delegations.get_singed_target_files_of_roles() +def test_get_signed_target_files_of_roles(tuf_repo_with_delegations): + actual = tuf_repo_with_delegations.get_signed_target_files_of_roles() assert actual == {"test2", "test1", "dir2/path2", "dir1/path1", "dir2/path1"} - actual = tuf_repo_with_delegations.get_singed_target_files_of_roles(["targets"]) + actual = tuf_repo_with_delegations.get_signed_target_files_of_roles(["targets"]) assert actual == {"test2", "test1"} - actual = tuf_repo_with_delegations.get_singed_target_files_of_roles(["targets"]) + actual = tuf_repo_with_delegations.get_signed_target_files_of_roles(["targets"]) assert actual == {"test2", "test1"} - actual = tuf_repo_with_delegations.get_singed_target_files_of_roles( + actual = tuf_repo_with_delegations.get_signed_target_files_of_roles( ["targets", "delegated_role"] ) assert 
actual == {"test2", "test1", "dir1/path1", "dir2/path1"} diff --git a/taf/tuf/keys.py b/taf/tuf/keys.py index 784102263..624201e96 100644 --- a/taf/tuf/keys.py +++ b/taf/tuf/keys.py @@ -3,7 +3,7 @@ """ -from typing import Optional, Union +from typing import Optional, Tuple, Union from pathlib import Path from securesystemslib.signer import ( @@ -27,8 +27,10 @@ from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME -def generate_rsa_keypair(key_size=3072, password=None): - # Generate private key +def generate_rsa_keypair(key_size=3072, password=None) -> Tuple[bytes, bytes]: + """ + Generate a private-public key pair. Returns the generated keys as bytes in PEM format.. + """ private_key = rsa.generate_private_key( public_exponent=65537, key_size=key_size, backend=default_backend() ) @@ -57,8 +59,11 @@ def generate_rsa_keypair(key_size=3072, password=None): return private_pem, public_pem -def generate_and_write_rsa_keypair(path, key_size, password): - +def generate_and_write_rsa_keypair(path, key_size, password) -> bytes: + """ + Generate a private-public key pair and write and save it to files. + Returns the private key in PEM format. + """ if not password: password = None private_pem, public_pem = generate_rsa_keypair(key_size, password) @@ -72,21 +77,12 @@ def generate_and_write_rsa_keypair(path, key_size, password): return private_pem -def _get_key_name(role_name: str, key_num: int, num_of_keys: int) -> str: - """ - Return a keystore key's name based on the role's name and total number of signing keys, - as well as the specified counter. If number of signing keys is one, return the role's name. - If the number of signing keys is greater that one, return role's name + counter (root1, root2...) - """ - if num_of_keys == 1: - return role_name - else: - return role_name + str(key_num + 1) - - def get_sslib_key_from_value( key: str, scheme: str = DEFAULT_RSA_SIGNATURE_SCHEME ) -> SSlibKey: + """ + Converts a key from its string representation into an SSlibKey object. 
+ """ key_val = key.encode() crypto_key = load_pem_public_key(key_val, backend=default_backend()) return _from_crypto(crypto_key, scheme=scheme) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index f0a7dfb2e..3af7cdc2a 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -936,9 +936,9 @@ def get_signed_target_files(self) -> Set[str]: - Set of all target paths relative to targets directory """ all_roles = self.get_all_targets_roles() - return self.get_singed_target_files_of_roles(all_roles) + return self.get_signed_target_files_of_roles(all_roles) - def get_singed_target_files_of_roles( + def get_signed_target_files_of_roles( self, roles: Optional[List] = None ) -> Set[str]: """Return all target files signed by the specified roles diff --git a/taf/tuf/storage.py b/taf/tuf/storage.py index 8a41b201d..c665e791f 100644 --- a/taf/tuf/storage.py +++ b/taf/tuf/storage.py @@ -67,12 +67,16 @@ class GitStorageBackend(FilesystemBackend): commit: Optional[str] = None def __new__(cls, *args, **kwargs): + # Bypass singleton + # This is necessary in order to use this within the context of + # parallel update of multiple repositories return super(FilesystemBackend, cls).__new__( cls, *args, **kwargs - ) # Bypass singleton - + ) @contextmanager def get(self, filepath: str): + # If the commit is specified, read from Git. + # If it is not specified, read from the filesystem. if self.commit is None: with super().get(filepath=filepath) as value_from_base: yield value_from_base @@ -89,6 +93,9 @@ def get(self, filepath: str): raise StorageError(e) def getsize(self, filepath: str) -> int: + # Get size of a file after reading it from Git or the filesystem. + # If the commit is specified, read from Git. + # If it is not specified, read from the filesystem. 
if self.commit is None: return super().getsize(filepath=filepath) try: @@ -103,6 +110,8 @@ def getsize(self, filepath: str) -> int: raise StorageError(e) def put(self, fileobj: IO, filepath: str, restrict: Optional[bool] = False) -> None: + # Write the file to the filesystem. + # Raise an error if the repository is a bare repository. repo_path = pygit2.discover_repository(filepath) if repo_path: repo = find_git_repository(filepath) From cc45f38d4d148f7a71fd0f3a8f0f6fb69a892ce6 Mon Sep 17 00:00:00 2001 From: Renata Date: Fri, 20 Dec 2024 19:53:21 -0500 Subject: [PATCH 07/26] chore: mypy and formatting fixes --- taf/keys.py | 2 +- .../tuf/test_create_edit_repo/test_update.py | 6 +- taf/tuf/repository.py | 77 ++++++++++++------- taf/tuf/storage.py | 5 +- 4 files changed, 57 insertions(+), 33 deletions(-) diff --git a/taf/keys.py b/taf/keys.py index 472097cb6..68c4862b8 100644 --- a/taf/keys.py +++ b/taf/keys.py @@ -489,7 +489,7 @@ def _invalid_key_message(key_name, keystore): signer = load_signer_from_pem(private_pem) else: _, private_pem = generate_rsa_keypair(key_size=length) - print(f"{role_name} key:\n\n{private_pem}\n\n") + print(f"{role_name} key:\n\n{private_pem.decode()}\n\n") signer = load_signer_from_pem(private_pem) return signer diff --git a/taf/tests/tuf/test_create_edit_repo/test_update.py b/taf/tests/tuf/test_create_edit_repo/test_update.py index a1cd2efe1..5fe044650 100644 --- a/taf/tests/tuf/test_create_edit_repo/test_update.py +++ b/taf/tests/tuf/test_create_edit_repo/test_update.py @@ -69,15 +69,15 @@ def test_add_new_role(tuf_repo, signers): def test_remove_delegated_paths(tuf_repo): - paths_to_remove = ["dir2/path1"] - tuf_repo.remove_delegated_paths({"delegated_role": paths_to_remove }) + paths_to_remove = ["dir2/path1"] + tuf_repo.remove_delegated_paths({"delegated_role": paths_to_remove}) assert tuf_repo.root().version == 1 assert tuf_repo.targets().version == 2 assert tuf_repo.timestamp().version == 1 assert tuf_repo.snapshot().version == 1 - 
for path in paths_to_remove : + for path in paths_to_remove: assert ( path not in tuf_repo.get_delegations_of_role("targets")["delegated_role"].paths diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 3af7cdc2a..2ce9753ab 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -307,15 +307,19 @@ def add_path_to_delegated_role(self, role: str, paths: List[str]) -> bool: raise TAFError(f"Role {role} does not exist") parent_role = self.find_delegated_roles_parent(role) + if parent_role is None: + return False if all( path in self.get_delegations_of_role(parent_role)[role].paths for path in paths ): return False - self.verify_signers_loaded([parent_role]) - with self.edit(parent_role) as parent: - parent.delegations.roles[role].paths.extend(paths) - return True + if parent_role: + self.verify_signers_loaded([parent_role]) + with self.edit(parent_role) as parent: + parent.delegations.roles[role].paths.extend(paths) + return True + return False def add_new_roles_to_snapshot(self, roles: List[str]) -> None: """ @@ -415,10 +419,17 @@ def check_roles_expiration_dates( return expired_dict, will_expire_dict - def _create_target_file(self, target_path: Path, target_data: Union[str, Dict]) -> None: + def _create_target_file(self, target_path: Path, target_data: Dict) -> None: """ Writes the specified data to a target file and stores it on disk. - If the target data is a dictionary, the data is written in JSON format. 
+ Target data is of the following form: + { + target: content of the target file, string or Dict (json) + custom: { + custom_field1: custom_value1, + custom_field2: custom_value2 + } + } """ # if the target's parent directory should not be "targets", create # its parent directories if they do not exist @@ -700,14 +711,14 @@ def find_parents_of_roles(self, roles: List[str]): parents.add(parent) return parents - def get_delegations_of_role(self, role_name: str) -> List: + def get_delegations_of_role(self, role_name: str) -> Dict: """ - Return a list of delegated roles of the specified target role + Return a dictionary of delegated roles of the specified target role """ signed_obj = self.signed_obj(role_name) if signed_obj.delegations: return signed_obj.delegations.roles - return [] + return {} def get_keyids_of_role(self, role_name: str) -> List: """ @@ -732,7 +743,9 @@ def get_targets_of_role(self, role_name: str): """ return self.signed_obj(role_name).targets - def find_keys_roles(self, public_keys: List, check_threshold: Optional[bool]=True) -> List: + def find_keys_roles( + self, public_keys: List, check_threshold: Optional[bool] = True + ) -> List: """Find all roles that can be signed by the provided keys. A role can be signed by the list of keys if at least the number of keys that can sign that file is equal to or greater than the role's @@ -741,13 +754,15 @@ def find_keys_roles(self, public_keys: List, check_threshold: Optional[bool]=Tru key_ids = [_get_legacy_keyid(public_key) for public_key in public_keys] return self.find_keysid_roles(key_ids=key_ids, check_threshold=check_threshold) - def find_keysid_roles(self, key_ids: List, check_threshold: Optional[bool]=True) -> List: + def find_keysid_roles( + self, key_ids: List, check_threshold: Optional[bool] = True + ) -> List: """Find all roles that can be signed by the provided keys. 
A role can be signed by the list of keys if at least the number of keys that can sign that file is equal to or greater than the role's threshold """ - roles = [] + roles: List[Tuple[str, Optional[str]]] = [] for role in MAIN_ROLES: roles.append((role, None)) keys_roles = [] @@ -797,7 +812,7 @@ def get_all_targets_roles(self) -> List: return all_roles - def get_all_target_files_state(self) -> List: + def get_all_target_files_state(self) -> Tuple: """Create dictionaries of added/modified and removed files by comparing current file-system state with current signed targets (and delegations) metadata state. @@ -810,8 +825,8 @@ def get_all_target_files_state(self) -> List: Raises: - None """ - added_target_files = {} - removed_target_files = {} + added_target_files: Dict = {} + removed_target_files: Dict = {} # current fs state fs_target_files = self.all_target_files() @@ -888,7 +903,7 @@ def get_role_paths(self, role): raise TAFError(f"Role {role} does not exist") return role.paths - def get_role_from_target_paths(self, target_paths: List) -> str: + def get_role_from_target_paths(self, target_paths: List) -> Optional[str]: """ Find a common role that can be used to sign given target paths. @@ -993,6 +1008,8 @@ def get_target_file_custom_data(self, target_path: str) -> Optional[Dict]: """ try: role = self.get_role_from_target_paths([target_path]) + if role is None: + return None target_obj = self.get_targets_of_role(role).get(target_path) if target_obj: return target_obj.custom @@ -1000,7 +1017,9 @@ def get_target_file_custom_data(self, target_path: str) -> Optional[Dict]: except KeyError: raise TAFError(f"Target {target_path} does not exist") - def get_target_file_hashes(self, target_path: str, hash_func: str=HASH_FUNCTION) -> str: + def get_target_file_hashes( + self, target_path: str, hash_func: str = HASH_FUNCTION + ) -> Optional[str]: """ Return hashes of the given target path. 
@@ -1009,9 +1028,11 @@ def get_target_file_hashes(self, target_path: str, hash_func: str=HASH_FUNCTION) """ try: role = self.get_role_from_target_paths([target_path]) + if role is None: + return None targets_of_role = self.get_targets_of_role(role) if target_path not in targets_of_role: - return None, None + return None hashes = targets_of_role[target_path].hashes if hash_func not in hashes: raise TAFError(f"Invalid hashing algorithm {hash_func}") @@ -1019,7 +1040,9 @@ def get_target_file_hashes(self, target_path: str, hash_func: str=HASH_FUNCTION) except KeyError: raise TAFError(f"Target {target_path} does not exist") - def get_key_length_and_scheme_from_metadata(self, parent_role: str, keyid: str) -> Tuple: + def get_key_length_and_scheme_from_metadata( + self, parent_role: str, keyid: str + ) -> Tuple: """ Return length and signing scheme of the specified key id. This data is specified in metadata files (root or a target role that has delegations) @@ -1210,7 +1233,9 @@ def map_signing_roles(self, target_filenames: List) -> Dict: return roles_targets - def modify_targets(self, added_data: Optional[Dict]=None, removed_data: Optional[Dict]=None) -> None: + def modify_targets( + self, added_data: Optional[Dict] = None, removed_data: Optional[Dict] = None + ) -> Targets: """Creates a target.json file containing a repository's commit for each repository. Adds those files to the tuf repository. 
@@ -1239,7 +1264,7 @@ def modify_targets(self, added_data: Optional[Dict]=None, removed_data: Optional Custom is an optional property which, if present, will be used to specify a TUF target's Returns: - - Role name used to update given targets + - Role whose targets were updates """ added_data = {} if added_data is None else added_data removed_data = {} if removed_data is None else removed_data @@ -1248,7 +1273,7 @@ def modify_targets(self, added_data: Optional[Dict]=None, removed_data: Optional raise TargetsError("Nothing to be modified!") target_paths = list(data.keys()) - targets_role = self.get_role_from_target_paths(data) + targets_role = self.get_role_from_target_paths(target_paths) if targets_role is None: raise TargetsError( f"Could not find a common role for target paths:\n{'-'.join(target_paths)}" @@ -1283,7 +1308,7 @@ def _modify_tarets_role( added_target_files: List[TargetFile], removed_paths: List[str], role_name: Optional[str] = Targets.type, - ) -> None: + ) -> Targets: """Add target files to top-level targets metadata.""" with self.edit_targets(rolename=role_name) as targets: for target_file in added_target_files: @@ -1395,7 +1420,7 @@ def roles_targets_for_filenames(self, target_filenames): roles_targets_mapping.setdefault(role_name, []).append(target_filename) return roles_targets_mapping - def _role_obj(self, role: str, parent: Optional[str]=None): + def _role_obj(self, role: str, parent: Optional[str] = None): """ Return TUF's role object for the specified role """ @@ -1407,7 +1432,7 @@ def _role_obj(self, role: str, parent: Optional[str]=None): except (KeyError, ValueError): raise TAFError("root.json is invalid") else: - parent_name = self.find_delegated_roles_parent(role, parent) + parent_name = self.find_delegated_roles_parent(role) if parent_name is None: return None md = self.open(parent_name) @@ -1536,7 +1561,7 @@ def update_target_role(self, role: str, target_paths: Dict): self._modify_tarets_role(target_files, removed_paths, role) - def 
update_snapshot_and_timestamp(self, force: Optional[bool]=True): + def update_snapshot_and_timestamp(self, force: Optional[bool] = True): """ Update timestamp and snapshot roles. If force is true, update them even if their content was not modified diff --git a/taf/tuf/storage.py b/taf/tuf/storage.py index c665e791f..0372d4c2b 100644 --- a/taf/tuf/storage.py +++ b/taf/tuf/storage.py @@ -70,9 +70,8 @@ def __new__(cls, *args, **kwargs): # Bypass singleton # This is necessary in order to use this within the context of # parallel update of multiple repositories - return super(FilesystemBackend, cls).__new__( - cls, *args, **kwargs - ) + return super(FilesystemBackend, cls).__new__(cls, *args, **kwargs) + @contextmanager def get(self, filepath: str): # If the commit is specified, read from Git. From 22dd4b848c7e8d5c7cc3b4a4cd3c22596c170c73 Mon Sep 17 00:00:00 2001 From: Renata Date: Fri, 20 Dec 2024 19:59:11 -0500 Subject: [PATCH 08/26] refact: update validation.py imports --- taf/validation.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/taf/validation.py b/taf/validation.py index e71f629f3..856a6d43a 100644 --- a/taf/validation.py +++ b/taf/validation.py @@ -1,9 +1,8 @@ from pathlib import Path -from tuf.repository_tool import TARGETS_DIRECTORY_NAME, METADATA_DIRECTORY_NAME -from taf.repository_tool import Repository, get_target_path -from taf.constants import CAPSTONE +from taf.constants import CAPSTONE, METADATA_DIRECTORY_NAME, TARGETS_DIRECTORY_NAME from taf.exceptions import GitError, InvalidBranchError +from taf.tuf.repository import MetadataRepository, get_target_path def validate_branch( @@ -237,7 +236,7 @@ def _compare_commit_with_targets_metadata( def _get_unchanged_targets_metadata(auth_repo, updated_roles): - taf_repo = Repository(auth_repo.path) + taf_repo = MetadataRepository(auth_repo.path) all_roles = taf_repo.get_all_targets_roles() all_roles = [*(set(all_roles) - set(updated_roles))] return all_roles From 
8b208321b1ea6eae69f8dd5a67c9afcef443d346 Mon Sep 17 00:00:00 2001 From: n-dusan Date: Sat, 21 Dec 2024 22:34:15 +0100 Subject: [PATCH 09/26] feat: introduce cli tests with click `CliRunner` The idea being we cover all the key cli commands with tests. - `click` supports CLI tests [1]. To get started, we initialize a `CliRunner` and invoke the taf command that we want to test. Thankfully, testing is relatively straightforward. In cases where the CLI expects a user input, such as pressing ENTER or a "[y/N]" answer, `runner` supports an `input` param that gets passed into the subprocess stdin. Moreover, we can take the output of the cli test and assert the print/logging statements that occurred, which is really cool. This should make adding new cli tests relatively easy. - When asserting CLI output, such as logging statements (when the command began and when it finished executing), with the `caplog` built-in pytest fixture, things get funky since we use `loguru` instead of built-in python logging module. To resolve, we patch the `caplog` fixture in `conftest.py` to point to the `loguru` module. Added a docstring explaining it in more detail in `conftest.py` - Added ~14 cli tests that should cover all the important flows that we use. I managed to get most of them working, but a couple of them seem to be having slight issues with asserts and expected states. I've added comments to those to debug easier. - Since cli tests share a lot of the fixtures that `test_api.conftest` has, slightly re-organized the `test_api` module to avoid duplicating code/functions. The existing tests are now in their own subdirectory (e.g. `test_api\dependencies\api\` `test_api\roles\api`, etc.), while the newly added tests are in the sibling `cli` directory (e.g. `test_api\dependencies\cli\`...). The nice thing is that this is complementary to the api functions, so when adding a new api test, we can easily add a cli test.
[1] - https://click.palletsprojects.com/en/stable/testing/ --- taf/tests/test_api/conf/__init__.py | 0 taf/tests/test_api/conf/api/__init__.py | 0 taf/tests/test_api/conf/cli/__init__.py | 0 .../test_api/conf/cli/test_conf_init_cmd.py | 22 ++ taf/tests/test_api/conftest.py | 171 +++++++++++---- taf/tests/test_api/dependencies/__init__.py | 0 .../test_api/dependencies/api/__init__.py | 0 .../api}/test_dependencies.py | 31 +-- .../test_api/dependencies/cli/__init__.py | 0 .../cli/test_dependencies_cmds.py | 81 ++++++++ taf/tests/test_api/metadata/__init__.py | 0 taf/tests/test_api/metadata/api/__init__.py | 0 .../{ => metadata/api}/test_metadata.py | 0 taf/tests/test_api/metadata/cli/__init__.py | 0 taf/tests/test_api/repo/__init__.py | 0 taf/tests/test_api/repo/api/__init__.py | 0 .../{ => repo/api}/test_create_repository.py | 0 taf/tests/test_api/repo/cli/__init__.py | 0 .../test_api/repo/cli/test_repo_create_cmd.py | 86 ++++++++ taf/tests/test_api/roles/__init__.py | 0 taf/tests/test_api/roles/api/__init__.py | 0 .../test_api/{ => roles/api}/test_roles.py | 44 +--- taf/tests/test_api/roles/cli/__init__.py | 0 .../test_api/roles/cli/test_roles_cmds.py | 196 ++++++++++++++++++ taf/tests/test_api/targets/__init__.py | 0 taf/tests/test_api/targets/api/__init__.py | 0 .../{ => targets/api}/test_targets.py | 52 +---- taf/tests/test_api/targets/cli/__init__.py | 0 .../test_api/targets/cli/test_targets_cmds.py | 126 +++++++++++ taf/tests/test_api/util.py | 20 ++ taf/tests/utils.py | 4 + 31 files changed, 677 insertions(+), 156 deletions(-) create mode 100644 taf/tests/test_api/conf/__init__.py create mode 100644 taf/tests/test_api/conf/api/__init__.py create mode 100644 taf/tests/test_api/conf/cli/__init__.py create mode 100644 taf/tests/test_api/conf/cli/test_conf_init_cmd.py create mode 100644 taf/tests/test_api/dependencies/__init__.py create mode 100644 taf/tests/test_api/dependencies/api/__init__.py rename taf/tests/test_api/{ => 
dependencies/api}/test_dependencies.py (85%) create mode 100644 taf/tests/test_api/dependencies/cli/__init__.py create mode 100644 taf/tests/test_api/dependencies/cli/test_dependencies_cmds.py create mode 100644 taf/tests/test_api/metadata/__init__.py create mode 100644 taf/tests/test_api/metadata/api/__init__.py rename taf/tests/test_api/{ => metadata/api}/test_metadata.py (100%) create mode 100644 taf/tests/test_api/metadata/cli/__init__.py create mode 100644 taf/tests/test_api/repo/__init__.py create mode 100644 taf/tests/test_api/repo/api/__init__.py rename taf/tests/test_api/{ => repo/api}/test_create_repository.py (100%) create mode 100644 taf/tests/test_api/repo/cli/__init__.py create mode 100644 taf/tests/test_api/repo/cli/test_repo_create_cmd.py create mode 100644 taf/tests/test_api/roles/__init__.py create mode 100644 taf/tests/test_api/roles/api/__init__.py rename taf/tests/test_api/{ => roles/api}/test_roles.py (87%) create mode 100644 taf/tests/test_api/roles/cli/__init__.py create mode 100644 taf/tests/test_api/roles/cli/test_roles_cmds.py create mode 100644 taf/tests/test_api/targets/__init__.py create mode 100644 taf/tests/test_api/targets/api/__init__.py rename taf/tests/test_api/{ => targets/api}/test_targets.py (85%) create mode 100644 taf/tests/test_api/targets/cli/__init__.py create mode 100644 taf/tests/test_api/targets/cli/test_targets_cmds.py diff --git a/taf/tests/test_api/conf/__init__.py b/taf/tests/test_api/conf/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/conf/api/__init__.py b/taf/tests/test_api/conf/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/conf/cli/__init__.py b/taf/tests/test_api/conf/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/conf/cli/test_conf_init_cmd.py b/taf/tests/test_api/conf/cli/test_conf_init_cmd.py new file mode 100644 index 000000000..dea1313c8 --- /dev/null +++ 
b/taf/tests/test_api/conf/cli/test_conf_init_cmd.py @@ -0,0 +1,22 @@ +from pathlib import Path + +from click.testing import CliRunner + +from taf.tools.cli.taf import taf + + +def test_init_conf_cmd_expect_success(keystore): + runner = CliRunner() + with runner.isolated_filesystem(): + cwd = Path.cwd() + runner.invoke( + taf, + [ + "conf", + "init", + "--keystore", + f"{str(keystore)}", + ], + ) + assert (cwd / ".taf" / "config.toml").exists() + assert (cwd / ".taf" / "keystore").exists() diff --git a/taf/tests/test_api/conftest.py b/taf/tests/test_api/conftest.py index 0cce6a20f..b3eea6cd0 100644 --- a/taf/tests/test_api/conftest.py +++ b/taf/tests/test_api/conftest.py @@ -1,15 +1,19 @@ -import json -import pytest -from pathlib import Path import shutil import uuid +from pathlib import Path +from typing import Dict + +import pytest +from _pytest.logging import LogCaptureFixture +from loguru import logger from taf.api.repository import create_repository from taf.auth_repo import AuthenticationRepository -from taf.tests.conftest import TEST_DATA_PATH +from taf.git import GitRepository +from taf.tests.conftest import CLIENT_DIR_PATH, KEYSTORES_PATH, TEST_DATA_PATH +from taf.tests.utils import copy_mirrors_json, copy_repositories_json, read_json from taf.utils import on_rm_error - REPOSITORY_DESCRIPTION_INPUT_DIR = TEST_DATA_PATH / "repository_description_inputs" TEST_INIT_DATA_PATH = Path(__file__).parent.parent / "init_data" NO_DELEGATIONS_INPUT = REPOSITORY_DESCRIPTION_INPUT_DIR / "no_delegations.json" @@ -25,9 +29,45 @@ INVALID_PATH_INPUT = REPOSITORY_DESCRIPTION_INPUT_DIR / "invalid_path.json" OLD_YUBIKEY_INPUT = REPOSITORY_DESCRIPTION_INPUT_DIR / "with_old_yubikey.json" +AUTH_REPO_NAME = "auth" +DEPENDENCY_NAME = "dependency/auth" -def _read_json(path): - return json.loads(Path(path).read_text()) + +@pytest.fixture(scope="module") +def api_repo_path(repo_dir): + path = repo_dir / "api" / "auth" + yield path + shutil.rmtree(path.parent, onerror=on_rm_error) + + 
+@pytest.fixture(scope="session") +def no_delegations_json_input(): + return read_json(NO_DELEGATIONS_INPUT) + + +@pytest.fixture(scope="session") +def with_delegations_json_input(): + return read_json(WITH_DELEGATIONS_INPUT) + + +@pytest.fixture(scope="session") +def invalid_public_key_json_input(): + return read_json(INVALID_PUBLIC_KEY_INPUT) + + +@pytest.fixture(scope="session") +def invalid_keys_number_json_input(): + return read_json(INVALID_KEYS_NUMBER_INPUT) + + +@pytest.fixture(scope="session") +def invalid_path_input(): + return read_json(INVALID_PATH_INPUT) + + +@pytest.fixture(scope="session") +def with_old_yubikey_input(): + return read_json(OLD_YUBIKEY_INPUT) @pytest.fixture @@ -68,21 +108,9 @@ def auth_repo_with_delegations( yield auth_repo -@pytest.fixture(scope="module") -def api_repo_path(repo_dir): - path = repo_dir / "api" / "auth" - yield path - shutil.rmtree(path.parent, onerror=on_rm_error) - - -@pytest.fixture(scope="session") -def no_delegations_json_input(): - return _read_json(NO_DELEGATIONS_INPUT) - - @pytest.fixture(scope="session") def no_yubikeys_json_input(): - return _read_json(NO_YUBIKEYS_INPUT) + return read_json(NO_YUBIKEYS_INPUT) @pytest.fixture(scope="session") @@ -90,26 +118,99 @@ def no_yubikeys_path(): return str(NO_YUBIKEYS_INPUT) -@pytest.fixture(scope="session") -def with_delegations_json_input(): - return _read_json(WITH_DELEGATIONS_INPUT) +@pytest.fixture(scope="module") +def library(repo_dir): + random_name = str(uuid.uuid4()) + root_dir = repo_dir / random_name + # create an initialize some target repositories + # their content is not important + auth_path = root_dir / AUTH_REPO_NAME + auth_path.mkdir(exist_ok=True, parents=True) + targets = ("target1", "target2", "target3", "new_target") + for target in targets: + target_repo_path = root_dir / target + target_repo_path.mkdir() + target_repo = GitRepository(path=target_repo_path) + target_repo.init_repo() + target_repo.commit_empty("Initial commit") + yield root_dir + 
shutil.rmtree(root_dir, onerror=on_rm_error) + + +@pytest.fixture(scope="function") +def auth_repo_when_add_repositories_json( + library: Path, + with_delegations_no_yubikeys_path: str, + keystore_delegations: str, + repositories_json_template: Dict, + mirrors_json_path: Path, +): + repo_path = library / "auth" + namespace = library.name + copy_repositories_json(repositories_json_template, namespace, repo_path) + copy_mirrors_json(mirrors_json_path, repo_path) + create_repository( + str(repo_path), + roles_key_infos=with_delegations_no_yubikeys_path, + keystore=keystore_delegations, + commit=True, + ) + auth_reo = AuthenticationRepository(path=repo_path) + yield auth_reo + shutil.rmtree(repo_path, onerror=on_rm_error) -@pytest.fixture(scope="session") -def invalid_public_key_json_input(): - return _read_json(INVALID_PUBLIC_KEY_INPUT) +def _init_auth_repo_dir(): + random_name = str(uuid.uuid4()) + root_dir = CLIENT_DIR_PATH / random_name + auth_path = root_dir / AUTH_REPO_NAME + auth_path.mkdir(exist_ok=True, parents=True) + return auth_path -@pytest.fixture(scope="session") -def invalid_keys_number_json_input(): - return _read_json(INVALID_KEYS_NUMBER_INPUT) +@pytest.fixture(scope="module") +def child_repo_path(): + repo_path = _init_auth_repo_dir() + yield repo_path + shutil.rmtree(str(repo_path.parent), onerror=on_rm_error) -@pytest.fixture(scope="session") -def invalid_path_input(): - return _read_json(INVALID_PATH_INPUT) +@pytest.fixture(scope="module") +def parent_repo_path(): + repo_path = _init_auth_repo_dir() + yield repo_path + shutil.rmtree(str(repo_path.parent), onerror=on_rm_error) -@pytest.fixture(scope="session") -def with_old_yubikey_input(): - return _read_json(OLD_YUBIKEY_INPUT) +@pytest.fixture(scope="module") +def roles_keystore(keystore_delegations): + # set up a keystore by copying the api keystore + # new keystore files are expected to be created and store to this directory + # it will be removed once this test's execution is done + # Create 
the destination folder if it doesn't exist + roles_keystore = KEYSTORES_PATH / "roles_keystore" + if roles_keystore.is_dir(): + shutil.rmtree(str(roles_keystore)) + + # Copy the contents of the source folder to the destination folder + shutil.copytree(keystore_delegations, str(roles_keystore)) + yield str(roles_keystore) + shutil.rmtree(str(roles_keystore)) + + +@pytest.fixture +def caplog(caplog: LogCaptureFixture): + """ + Override pytest capture logging (caplog fixture) to point to loguru logging instead. + This is because we use loguru logging instead of the default logging module. + Source: https://loguru.readthedocs.io/en/stable/resources/migration.html#replacing-caplog-fixture-from-pytest-library + """ + handler_id = logger.add( + caplog.handler, + format="{message}", + level=0, + filter=lambda record: record["level"].no >= caplog.handler.level, + enqueue=False, # Set to 'True' if your test is spawning child processes. + ) + yield caplog + logger.remove(handler_id) diff --git a/taf/tests/test_api/dependencies/__init__.py b/taf/tests/test_api/dependencies/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/dependencies/api/__init__.py b/taf/tests/test_api/dependencies/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/test_dependencies.py b/taf/tests/test_api/dependencies/api/test_dependencies.py similarity index 85% rename from taf/tests/test_api/test_dependencies.py rename to taf/tests/test_api/dependencies/api/test_dependencies.py index 18d4803c7..24c6e0adf 100644 --- a/taf/tests/test_api/test_dependencies.py +++ b/taf/tests/test_api/dependencies/api/test_dependencies.py @@ -1,5 +1,3 @@ -import shutil -import uuid import pytest from pathlib import Path from taf.api.dependencies import add_dependency, remove_dependency @@ -8,34 +6,7 @@ import taf.repositoriesdb as repositoriesdb from taf.auth_repo import AuthenticationRepository from taf.api.repository import 
create_repository -from taf.tests.conftest import CLIENT_DIR_PATH -from taf.utils import on_rm_error - - -AUTH_REPO_NAME = "auth" -DEPENDENCY_NAME = "dependency/auth" - - -def _init_auth_repo_dir(): - random_name = str(uuid.uuid4()) - root_dir = CLIENT_DIR_PATH / random_name - auth_path = root_dir / AUTH_REPO_NAME - auth_path.mkdir(exist_ok=True, parents=True) - return auth_path - - -@pytest.fixture(scope="module") -def child_repo_path(): - repo_path = _init_auth_repo_dir() - yield repo_path - shutil.rmtree(str(repo_path.parent), onerror=on_rm_error) - - -@pytest.fixture(scope="module") -def parent_repo_path(): - repo_path = _init_auth_repo_dir() - yield repo_path - shutil.rmtree(str(repo_path.parent), onerror=on_rm_error) +from taf.tests.test_api.conftest import DEPENDENCY_NAME def test_setup_repositories( diff --git a/taf/tests/test_api/dependencies/cli/__init__.py b/taf/tests/test_api/dependencies/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/dependencies/cli/test_dependencies_cmds.py b/taf/tests/test_api/dependencies/cli/test_dependencies_cmds.py new file mode 100644 index 000000000..dcbebf362 --- /dev/null +++ b/taf/tests/test_api/dependencies/cli/test_dependencies_cmds.py @@ -0,0 +1,81 @@ +import json + +from click.testing import CliRunner + +from taf.api.repository import create_repository +from taf.auth_repo import AuthenticationRepository +from taf.tests.test_api.conftest import DEPENDENCY_NAME +from taf.tools.cli.taf import taf + + +def test_dependencies_add_cmd_expect_success( + parent_repo_path, + child_repo_path, + with_delegations_no_yubikeys_path, + keystore_delegations, +): + for path in (child_repo_path, parent_repo_path): + create_repository( + str(path), + roles_key_infos=with_delegations_no_yubikeys_path, + keystore=keystore_delegations, + commit=True, + ) + runner = CliRunner() + + parent_auth_repo = AuthenticationRepository(path=parent_repo_path) + child_auth_repo = 
AuthenticationRepository(path=child_repo_path) + + assert not (parent_auth_repo.path / "targets" / "dependencies.json").exists() + + runner.invoke( + taf, + [ + "dependencies", + "add", + DEPENDENCY_NAME, + "--path", + f"{str(parent_auth_repo.path)}", + "--dependency-path", + f"{child_auth_repo.path}", + "--keystore", + f"{str(keystore_delegations)}", + ], + input="y\n", # pass in y to resolve Proceed? prompt + ) + assert (parent_auth_repo.path / "targets" / "dependencies.json").exists() + + dependencies_json = json.loads( + (parent_auth_repo.path / "targets" / "dependencies.json").read_text() + ) + dependencies = dependencies_json["dependencies"][DEPENDENCY_NAME] + assert ( + child_auth_repo.head_commit_sha() in dependencies["out-of-band-authentication"] + ) + assert child_auth_repo.default_branch in dependencies["branch"] + + +def test_dependencies_remove_cmd_expect_success( + parent_repo_path, + keystore_delegations, +): + runner = CliRunner() + + parent_auth_repo = AuthenticationRepository(path=parent_repo_path) + + runner.invoke( + taf, + [ + "dependencies", + "remove", + DEPENDENCY_NAME, + "--path", + f"{str(parent_auth_repo.path)}", + "--keystore", + f"{str(keystore_delegations)}", + ], + ) + dependencies_json = json.loads( + (parent_auth_repo.path / "targets" / "dependencies.json").read_text() + ) + assert DEPENDENCY_NAME not in dependencies_json["dependencies"].keys() diff --git a/taf/tests/test_api/metadata/__init__.py b/taf/tests/test_api/metadata/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/metadata/api/__init__.py b/taf/tests/test_api/metadata/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/test_metadata.py b/taf/tests/test_api/metadata/api/test_metadata.py similarity index 100% rename from taf/tests/test_api/test_metadata.py rename to taf/tests/test_api/metadata/api/test_metadata.py diff --git a/taf/tests/test_api/metadata/cli/__init__.py 
b/taf/tests/test_api/metadata/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/repo/__init__.py b/taf/tests/test_api/repo/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/repo/api/__init__.py b/taf/tests/test_api/repo/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/test_create_repository.py b/taf/tests/test_api/repo/api/test_create_repository.py similarity index 100% rename from taf/tests/test_api/test_create_repository.py rename to taf/tests/test_api/repo/api/test_create_repository.py diff --git a/taf/tests/test_api/repo/cli/__init__.py b/taf/tests/test_api/repo/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/repo/cli/test_repo_create_cmd.py b/taf/tests/test_api/repo/cli/test_repo_create_cmd.py new file mode 100644 index 000000000..e429877e1 --- /dev/null +++ b/taf/tests/test_api/repo/cli/test_repo_create_cmd.py @@ -0,0 +1,86 @@ +from pathlib import Path + +from click.testing import CliRunner + +from taf.tools.cli.taf import taf + + +def test_repo_create_cmd_expect_success( + keystore_delegations, with_delegations_no_yubikeys_path, caplog +): + runner = CliRunner() + with runner.isolated_filesystem(): + result = runner.invoke( + taf, + [ + "repo", + "create", + ".\\test-law\\", + "--keys-description", + f"{str(with_delegations_no_yubikeys_path)}", + "--keystore", + f"{str(keystore_delegations)}", + "--no-commit", + "--test", + ], + ) + # logging statements are captured by caplog + # while print statements are captured by pytest CliRunner result object + output = caplog.text + # TODO: expected to have these asserts + assert "Please commit manually" in result.output + assert "Finished creating a new repository" in output + + cwd = Path.cwd() + assert (cwd / "test-law" / "metadata").exists() + assert (cwd / "test-law" / "targets").exists() + # TODO: actually have this. 
hopefully once issue is resolved error should get removed from assert + assert "An error occurred while signing target files" in output + assert "An error occurred while creating a new repository" in output + + +def test_repo_create_cmd_when_repo_already_created_expect_error( + keystore_delegations, with_delegations_no_yubikeys_path, caplog +): + runner = CliRunner() + with runner.isolated_filesystem(): + result = runner.invoke( + taf, + [ + "repo", + "create", + ".\\test-law\\", + "--keys-description", + f"{str(with_delegations_no_yubikeys_path)}", + "--keystore", + f"{str(keystore_delegations)}", + "--no-commit", + "--test", + ], + ) + cwd = Path.cwd() + assert (cwd / "test-law" / "metadata").exists() + assert (cwd / "test-law" / "targets").exists() + + output = caplog.text + assert "Finished creating a new repository" in output + # run the same command again + result = runner.invoke( + taf, + [ + "repo", + "create", + ".\\test-law\\", + "--keys-description", + f"{str(with_delegations_no_yubikeys_path)}", + "--keystore", + f"{str(keystore_delegations)}", + "--no-commit", + "--test", + ], + ) + # TODO: expected to have this output, instead get same error as first test + assert ( + '"test-law" is a git repository containing the metadata directory. Generating new metadata files could make the repository invalid. 
Aborting' + in result.output + ) diff --git a/taf/tests/test_api/roles/__init__.py b/taf/tests/test_api/roles/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/roles/api/__init__.py b/taf/tests/test_api/roles/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/test_roles.py b/taf/tests/test_api/roles/api/test_roles.py similarity index 87% rename from taf/tests/test_api/test_roles.py rename to taf/tests/test_api/roles/api/test_roles.py index d4041f9cb..13f23b1dc 100644 --- a/taf/tests/test_api/test_roles.py +++ b/taf/tests/test_api/roles/api/test_roles.py @@ -1,7 +1,4 @@ -import shutil -import pytest from pathlib import Path -from typing import List from taf.api.roles import ( add_role, add_role_paths, @@ -13,23 +10,7 @@ ) from taf.messages import git_commit_message from taf.auth_repo import AuthenticationRepository -from taf.tests.conftest import KEYSTORES_PATH - - -@pytest.fixture(scope="module") -def roles_keystore(keystore_delegations): - # set up a keystore by copying the api keystore - # new keystore files are expected to be created and store to this directory - # it will be removed once this test's execution is done - # Create the destination folder if it doesn't exist - roles_keystore = KEYSTORES_PATH / "roles_keystore" - if roles_keystore.is_dir(): - shutil.rmtree(str(roles_keystore)) - - # Copy the contents of the source folder to the destination folder - shutil.copytree(keystore_delegations, str(roles_keystore)) - yield str(roles_keystore) - shutil.rmtree(str(roles_keystore)) +from taf.tests.test_api.util import check_new_role def test_add_role_when_target_is_parent( @@ -55,7 +36,7 @@ def test_add_role_when_target_is_parent( commits = auth_repo.list_commits() assert len(commits) == initial_commits_num + 1 assert commits[0].message.strip() == git_commit_message("add-role", role=ROLE_NAME) - _check_new_role(auth_repo, ROLE_NAME, PATHS, roles_keystore, PARENT_NAME) + 
check_new_role(auth_repo, ROLE_NAME, PATHS, roles_keystore, PARENT_NAME) def test_add_role_when_delegated_role_is_parent( @@ -81,7 +62,7 @@ def test_add_role_when_delegated_role_is_parent( commits = auth_repo_with_delegations.list_commits() assert len(commits) == initial_commits_num + 1 assert commits[0].message.strip() == git_commit_message("add-role", role=ROLE_NAME) - _check_new_role( + check_new_role( auth_repo_with_delegations, ROLE_NAME, PATHS, roles_keystore, PARENT_NAME ) @@ -299,22 +280,3 @@ def test_revoke_signing_key(auth_repo: AuthenticationRepository, roles_keystore: targets_keys_infos = list_keys_of_role(str(auth_repo.path), "targets") assert len(targets_keys_infos) == 1 assert commits[0].message.strip() == COMMIT_MSG - - -def _check_new_role( - auth_repo: AuthenticationRepository, - role_name: str, - paths: List[str], - keystore_path: str, - parent_name: str, -): - # check if keys were created - assert Path(keystore_path, f"{role_name}1").is_file() - assert Path(keystore_path, f"{role_name}2").is_file() - assert Path(keystore_path, f"{role_name}1.pub").is_file() - assert Path(keystore_path, f"{role_name}2.pub").is_file() - target_roles = auth_repo.get_all_targets_roles() - assert role_name in target_roles - assert auth_repo.find_delegated_roles_parent(role_name) == parent_name - roles_paths = auth_repo.get_role_paths(role_name) - assert roles_paths == paths diff --git a/taf/tests/test_api/roles/cli/__init__.py b/taf/tests/test_api/roles/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/roles/cli/test_roles_cmds.py b/taf/tests/test_api/roles/cli/test_roles_cmds.py new file mode 100644 index 000000000..ac0223bc5 --- /dev/null +++ b/taf/tests/test_api/roles/cli/test_roles_cmds.py @@ -0,0 +1,196 @@ +import json + +from pathlib import Path + +from click.testing import CliRunner + +from taf.api.roles import list_keys_of_role +from taf.tests.test_api.util import check_new_role +from taf.tools.cli.taf import taf 
+ + +def test_roles_add_cmd_expect_success(auth_repo_with_delegations, roles_keystore): + runner = CliRunner() + + with runner.isolated_filesystem(): + # cli expects a config file, so we manually create config pass it to the cli + cwd = Path.cwd() + config = { + "parent_role": "targets", + "delegated_path": [ + "/delegated_path_inside_targets1", + "/delegated_path_inside_targets2", + ], + "keys_number": 1, + "threshold": 1, + "yubikey": False, + "scheme": "rsa-pkcs1v15-sha256", + } + config_file_path = cwd / "config.json" + with open(config_file_path, "w") as f: + json.dump(config, f) + runner.invoke( + taf, + [ + "roles", + "add", + "rolename1", + "--path", + f"{str(auth_repo_with_delegations.path)}", + "--config-file", + f"{str(config_file_path)}", + "--keystore", + f"{str(roles_keystore)}", + ], + ) + # TODO: there seems to be an assertion error here + check_new_role( + auth_repo_with_delegations, + "rolename1", + ["/delegated_path_inside_targets1", "/delegated_path_inside_targets2"], + str(roles_keystore), + "targets", + ) + + +def test_roles_add_multiple_cmd_expect_success( + auth_repo, with_delegations_no_yubikeys_path, roles_keystore +): + runner = CliRunner() + + with runner.isolated_filesystem(): + runner.invoke( + taf, + [ + "roles", + "add-multiple", + f"{str(with_delegations_no_yubikeys_path)}", + "--path", + f"{str(auth_repo.path)}", + "--keystore", + f"{str(roles_keystore)}", + ], + ) + new_roles = ["delegated_role", "inner_role"] + target_roles = auth_repo.get_all_targets_roles() + for role_name in new_roles: + assert role_name in target_roles + assert auth_repo.find_delegated_roles_parent("delegated_role") == "targets" + assert auth_repo.find_delegated_roles_parent("inner_role") == "delegated_role" + + +def test_roles_add_role_paths_cmd_expect_success( + auth_repo_with_delegations, roles_keystore +): + runner = CliRunner() + + with runner.isolated_filesystem(): + new_paths = ["some-path3"] + role_name = "delegated_role" + runner.invoke( + taf, + [ + 
"roles", + "add-role-paths", + f"{role_name}", + "--path", + f"{str(auth_repo_with_delegations.path)}", + "--delegated-path", + f"{new_paths[0]}", + "--keystore", + f"{str(roles_keystore)}", + ], + ) + roles_paths = auth_repo_with_delegations.get_role_paths(role_name) + assert len(roles_paths) == 3 + assert "some-path3" in roles_paths + + +def test_roles_add_signing_key_cmd_expect_success(auth_repo, roles_keystore): + runner = CliRunner() + + with runner.isolated_filesystem(): + pub_key_path = Path(roles_keystore, "targets1.pub") + runner.invoke( + taf, + [ + "roles", + "add-signing-key", + "--path", + f"{str(auth_repo.path)}", + "--role", + "snapshot", + "--role", + "timestamp", + "--pub-key-path", + f"{pub_key_path}", + "--keystore", + f"{str(roles_keystore)}", + "--no-commit", + ], + ) + timestamp_keys_infos = list_keys_of_role(str(auth_repo.path), "timestamp") + assert len(timestamp_keys_infos) == 2 + snapshot_keys_infos = list_keys_of_role(str(auth_repo.path), "snapshot") + assert len(snapshot_keys_infos) == 2 + + +def test_revoke_key_cmd_expect_success(auth_repo, roles_keystore): + runner = CliRunner() + + targets_keys_infos = list_keys_of_role(str(auth_repo.path), "targets") + assert len(targets_keys_infos) == 2 + + with runner.isolated_filesystem(): + targest_keyids = auth_repo.get_keyids_of_role("targets") + key_to_remove = targest_keyids[-1] + runner.invoke( + taf, + [ + "roles", + "revoke-key", + f"{key_to_remove}", + "--path", + f"{str(auth_repo.path)}", + "--keystore", + f"{str(roles_keystore)}", + "--no-commit", + ], + ) + targets_keys_infos = list_keys_of_role(str(auth_repo.path), "targets") + assert len(targets_keys_infos) == 1 + # reset to head so that next test can run as expected + auth_repo.reset_to_head() + + +def test_rotate_key_cmd_expect_success(auth_repo, roles_keystore): + runner = CliRunner() + + with runner.isolated_filesystem(): + targest_keyids = auth_repo.get_keyids_of_role("targets") + key_to_rotate = targest_keyids[-1] + 
pub_key_path = Path(roles_keystore, "delegated_role1.pub") + + assert len(targest_keyids) == 2 + + runner.invoke( + taf, + [ + "roles", + "rotate-key", + f"{key_to_rotate}", + "--path", + f"{str(auth_repo.path)}", + "--pub-key-path", + f"{pub_key_path}", + "--keystore", + f"{str(roles_keystore)}", + "--no-commit", + ], + ) + new_targets_keyids = auth_repo.get_keyids_of_role("targets") + + assert len(new_targets_keyids) == 2 + # TODO: this assert does not pass. I assumed that the rotated key would not be in targets keyids, + # but I might have misunderstood what I needed to assert + assert key_to_rotate not in new_targets_keyids diff --git a/taf/tests/test_api/targets/__init__.py b/taf/tests/test_api/targets/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/targets/api/__init__.py b/taf/tests/test_api/targets/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/test_targets.py b/taf/tests/test_api/targets/api/test_targets.py similarity index 85% rename from taf/tests/test_api/test_targets.py rename to taf/tests/test_api/targets/api/test_targets.py index 560bfe7e1..37201825a 100644 --- a/taf/tests/test_api/test_targets.py +++ b/taf/tests/test_api/targets/api/test_targets.py @@ -1,15 +1,10 @@ from pathlib import Path -import shutil -import pytest -from typing import Dict -import uuid + from taf.constants import TARGETS_DIRECTORY_NAME from taf.messages import git_commit_message import taf.repositoriesdb as repositoriesdb from taf.auth_repo import AuthenticationRepository -from taf.git import GitRepository -from taf.tests.utils import copy_mirrors_json, copy_repositories_json -from taf.api.repository import create_repository + from taf.api.targets import ( add_target_repo, register_target_files, @@ -19,54 +14,11 @@ check_if_targets_signed, check_target_file, ) -from taf.utils import on_rm_error AUTH_REPO_NAME = "auth" -@pytest.fixture(scope="module") -def library(repo_dir): - 
random_name = str(uuid.uuid4()) - root_dir = repo_dir / random_name - # create an initialize some target repositories - # their content is not important - auth_path = root_dir / AUTH_REPO_NAME - auth_path.mkdir(exist_ok=True, parents=True) - targets = ("target1", "target2", "target3", "new_target") - for target in targets: - target_repo_path = root_dir / target - target_repo_path.mkdir() - target_repo = GitRepository(path=target_repo_path) - target_repo.init_repo() - target_repo.commit_empty("Initial commit") - yield root_dir - shutil.rmtree(root_dir, onerror=on_rm_error) - - -@pytest.fixture(scope="function") -def auth_repo_when_add_repositories_json( - library: Path, - with_delegations_no_yubikeys_path: str, - keystore_delegations: str, - repositories_json_template: Dict, - mirrors_json_path: Path, -): - repo_path = library / "auth" - namespace = library.name - copy_repositories_json(repositories_json_template, namespace, repo_path) - copy_mirrors_json(mirrors_json_path, repo_path) - create_repository( - str(repo_path), - roles_key_infos=with_delegations_no_yubikeys_path, - keystore=keystore_delegations, - commit=True, - ) - auth_reo = AuthenticationRepository(path=repo_path) - yield auth_reo - shutil.rmtree(repo_path, onerror=on_rm_error) - - def test_register_targets_when_file_added( auth_repo_when_add_repositories_json: AuthenticationRepository, library: Path, diff --git a/taf/tests/test_api/targets/cli/__init__.py b/taf/tests/test_api/targets/cli/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/taf/tests/test_api/targets/cli/test_targets_cmds.py b/taf/tests/test_api/targets/cli/test_targets_cmds.py new file mode 100644 index 000000000..9345a8872 --- /dev/null +++ b/taf/tests/test_api/targets/cli/test_targets_cmds.py @@ -0,0 +1,126 @@ +import json +from pathlib import Path + +from click.testing import CliRunner + +from taf.constants import TARGETS_DIRECTORY_NAME +from taf.tests.test_api.util import check_if_targets_signed +from 
taf.tools.cli.taf import taf + + +def test_targets_sign_when_target_file_is_added_expect_success( + auth_repo_when_add_repositories_json, + library, + keystore_delegations, +): + runner = CliRunner() + + repo_path = library / "auth" + FILENAME = "test.txt" + file_path = repo_path / TARGETS_DIRECTORY_NAME / FILENAME + file_path.write_text("test") + + runner.invoke( + taf, + [ + "targets", + "sign", + "--path", + f"{str(auth_repo_when_add_repositories_json.path)}", + "--keystore", + f"{str(keystore_delegations)}", + ], + ) + + check_if_targets_signed(auth_repo_when_add_repositories_json, "targets", FILENAME) + + +def test_targets_sign_when_target_file_is_removed_expect_success( + auth_repo_when_add_repositories_json, + library, + keystore_delegations, +): + runner = CliRunner() + + repo_path = library / "auth" + FILENAME = "test.txt" + file_path = repo_path / TARGETS_DIRECTORY_NAME / FILENAME + file_path.write_text("test") + + runner.invoke( + taf, + [ + "targets", + "sign", + "--path", + f"{str(auth_repo_when_add_repositories_json.path)}", + "--keystore", + f"{str(keystore_delegations)}", + ], + ) + check_if_targets_signed(auth_repo_when_add_repositories_json, "targets", FILENAME) + + file_path.unlink() + + runner.invoke( + taf, + [ + "targets", + "sign", + "--path", + f"{str(auth_repo_when_add_repositories_json.path)}", + "--keystore", + f"{str(keystore_delegations)}", + ], + ) + + signed_target_files = auth_repo_when_add_repositories_json.get_signed_target_files() + assert FILENAME not in signed_target_files + + +def test_targets_add_repo_cmd_expect_success( + auth_repo_when_add_repositories_json, library, keystore_delegations +): + runner = CliRunner() + + namespace = library.name + target_repo_name = f"{namespace}/target4" + + with runner.isolated_filesystem(): + # cli expects a config file, so we manually create config pass it to the cli + cwd = Path.cwd() + config = { + "allow-unauthenticated-commits": True, + "type": "html", + "serve": "latest", + 
"location_regex": "/", + "routes": [".*"], + } + config_file_path = cwd / "config.json" + with open(config_file_path, "w") as f: + json.dump(config, f) + runner.invoke( + taf, + [ + "targets", + "add-repo", + f"{target_repo_name}", + "--role", + "delegated_role", + "--path", + f"{str(auth_repo_when_add_repositories_json.path)}", + "--custom-file", + f"{str(config_file_path)}", + "--keystore", + f"{str(keystore_delegations)}", + ], + ) + delegated_paths = auth_repo_when_add_repositories_json.get_paths_of_role( + "delegated_role" + ) + assert target_repo_name in delegated_paths + + +def test_targets_remove_repo_cmd_expect_success(): + # TODO: seems like it is not fully supported yet + pass diff --git a/taf/tests/test_api/util.py b/taf/tests/test_api/util.py index ab6e4e6b1..43cc164f7 100644 --- a/taf/tests/test_api/util.py +++ b/taf/tests/test_api/util.py @@ -2,6 +2,7 @@ from typing import Optional from taf.auth_repo import AuthenticationRepository from taf.git import GitRepository +from typing import List def check_target_file( @@ -45,3 +46,22 @@ def check_if_targets_removed( for target_file in targets_filenames: assert target_file not in target_files assert target_file not in signed_target_files + + +def check_new_role( + auth_repo: AuthenticationRepository, + role_name: str, + paths: List[str], + keystore_path: str, + parent_name: str, +): + # check if keys were created + assert Path(keystore_path, f"{role_name}1").is_file() + assert Path(keystore_path, f"{role_name}2").is_file() + assert Path(keystore_path, f"{role_name}1.pub").is_file() + assert Path(keystore_path, f"{role_name}2.pub").is_file() + target_roles = auth_repo.get_all_targets_roles() + assert role_name in target_roles + assert auth_repo.find_delegated_roles_parent(role_name) == parent_name + roles_paths = auth_repo.get_role_paths(role_name) + assert roles_paths == paths diff --git a/taf/tests/utils.py b/taf/tests/utils.py index 21279720b..1b03cfa01 100644 --- a/taf/tests/utils.py +++ 
b/taf/tests/utils.py @@ -5,6 +5,10 @@ import shutil +def read_json(path): + return json.loads(Path(path).read_text()) + + def copy_repositories_json( repositories_json_template: Dict, namespace: str, auth_repo_path: Path ): From 82377f531483082279b20d3b9e2e470576f55918 Mon Sep 17 00:00:00 2001 From: Renata Vaderna Date: Mon, 23 Dec 2024 20:58:52 +0100 Subject: [PATCH 10/26] chore: release 0.33.0 (#574) --- CHANGELOG.md | 11 ++++++++++- setup.py | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7d28a0689..d6e082be3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,14 @@ and this project adheres to [Semantic Versioning][semver]. ### Added +### Changed + +### Fixed + +## [0.33.0] + +### Added + - Add tests for `get_last_remote_commit` and `reset_to_commit` ([573]) - Remove unused optional parameter from _yk_piv_ctrl ([572]) - Implement full partial update. Store last validated commit per repo ([559)]) @@ -1377,7 +1385,8 @@ and this project adheres to [Semantic Versioning][semver]. 
[keepachangelog]: https://keepachangelog.com/en/1.0.0/ [semver]: https://semver.org/spec/v2.0.0.html -[unreleased]: https://github.com/openlawlibrary/taf/compare/v0.32.4...HEAD +[unreleased]: https://github.com/openlawlibrary/taf/compare/v0.33.0...HEAD +[0.33.0]: https://github.com/openlawlibrary/taf/compare/v0.32.4...v0.33.0 [0.32.4]: https://github.com/openlawlibrary/taf/compare/v0.32.3...v0.32.4 [0.32.3]: https://github.com/openlawlibrary/taf/compare/v0.32.2...v0.32.3 [0.32.2]: https://github.com/openlawlibrary/taf/compare/v0.32.1...v0.32.2 diff --git a/setup.py b/setup.py index a9ce5ac33..610414b0f 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import find_packages, setup PACKAGE_NAME = "taf" -VERSION = "0.32.4" +VERSION = "0.33.0" AUTHOR = "Open Law Library" AUTHOR_EMAIL = "info@openlawlib.org" DESCRIPTION = "Implementation of archival authentication" From 005b293e2efe022591a6587993148b882926f49a Mon Sep 17 00:00:00 2001 From: Renata Date: Mon, 23 Dec 2024 20:46:05 -0500 Subject: [PATCH 11/26] test: fix failing cli tests --- taf/api/api_workflow.py | 2 +- taf/api/roles.py | 14 ++++--- .../keystores/keystore_delegations/new_role1 | 39 +++++++++++++++++++ .../keystore_delegations/new_role1.pub | 11 ++++++ .../keystores/keystore_delegations/new_role2 | 39 +++++++++++++++++++ .../keystore_delegations/new_role2.pub | 11 ++++++ .../test_api/repo/cli/test_repo_create_cmd.py | 21 +++++----- .../test_api/roles/cli/test_roles_cmds.py | 12 +++--- .../tuf/test_create_edit_repo/test_update.py | 1 + taf/tools/roles/__init__.py | 8 ++-- taf/tuf/repository.py | 3 +- 11 files changed, 133 insertions(+), 28 deletions(-) create mode 100644 taf/tests/data/keystores/keystore_delegations/new_role1 create mode 100644 taf/tests/data/keystores/keystore_delegations/new_role1.pub create mode 100644 taf/tests/data/keystores/keystore_delegations/new_role2 create mode 100644 taf/tests/data/keystores/keystore_delegations/new_role2.pub diff --git 
a/taf/api/api_workflow.py b/taf/api/api_workflow.py index a3208835e..e8073bdeb 100644 --- a/taf/api/api_workflow.py +++ b/taf/api/api_workflow.py @@ -93,7 +93,7 @@ def manage_repo_and_signers( auth_repo.add_signers_to_cache({role: keystore_signers}) auth_repo.add_signers_to_cache({role: yubikey_signers}) yield - if auth_repo.something_to_commit() and commit: + if commit and auth_repo.something_to_commit(): if not commit_msg and commit_key: commit_msg = git_commit_message(commit_key) auth_repo.commit_and_push(commit_msg=commit_msg, push=push) diff --git a/taf/api/roles.py b/taf/api/roles.py index f39e6bb43..64b02d858 100644 --- a/taf/api/roles.py +++ b/taf/api/roles.py @@ -473,10 +473,10 @@ def rotate_signing_key( roles: Optional[List[str]] = None, keystore: Optional[str] = None, scheme: Optional[str] = DEFAULT_RSA_SIGNATURE_SCHEME, - commit: Optional[bool] = True, prompt_for_keys: Optional[bool] = False, push: Optional[bool] = True, - commit_msg: Optional[str] = None, + revoke_commit_msg: Optional[str] = None, + add_commit_msg: Optional[str] = None, ) -> None: """ Rotate signing key. Remove it from one or more roles and add a new signing key. @@ -494,7 +494,8 @@ def rotate_signing_key( prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. commit (optional): Indicates if the changes should be committed and pushed automatically. push (optional): Flag specifying whether to push to remote. - commit_msg(optional): Commit message. Will be necessary to enter it if not provided. + revoke_commit_msg(optional): First commit message, when revoking the specified key. Will be necessary to enter it if not provided. + add_commit_msg(optional): Second commit message, when adding a new signing key. Will be necessary to enter it if not provided. Side Effects: Updates metadata files (parents of the affected roles, snapshot and timestamp). Writes changes to disk.
@@ -516,9 +517,10 @@ def rotate_signing_key( roles=roles, keystore=keystore, scheme=scheme, - commit=commit, + commit=True, prompt_for_keys=prompt_for_keys, push=False, + commit_msg=revoke_commit_msg, ) add_signing_key( @@ -527,10 +529,10 @@ def rotate_signing_key( pub_key=pub_key, keystore=keystore, scheme=scheme, - commit=commit, + commit=True, prompt_for_keys=prompt_for_keys, push=push, - commit_msg=commit_msg, + commit_msg=add_commit_msg, ) diff --git a/taf/tests/data/keystores/keystore_delegations/new_role1 b/taf/tests/data/keystores/keystore_delegations/new_role1 new file mode 100644 index 000000000..9ddd045cd --- /dev/null +++ b/taf/tests/data/keystores/keystore_delegations/new_role1 @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG4gIBAAKCAYEAuORhNgvRwRbtldqmD0S9PmzuLP1cnoi0pe09gmAD0FOQflZy +4APsoYYPOCr6gplCP9gElhuAd+1ldzAAwgjILGWxwWhEFdjJ7r2nhtNoXAgjHRJx +4f2YjRIIpXRfDPwMW04MxZXudh9WM3zOdhcspJ93CEFF7fxkJunrpkhjVNOrTDWY +W7QUsEj2/ifxvscNL0RIbXX9967HNFULsnJ2/nfe9Y/DJdHkUDiP9bpelcIlJ2wY +n1tvTIzT2JbzRrIKLgTQyx1DdB5kSADugYdh3JaSkH28qA67zFgm9RAgMyfPkrVz +k1mKP/z6wYkbEy+vyyueXCVcwnGW+RZMlw8dDU1ZJrYPKyAvxioyOHNxxYM1to1m +9LDvAUc96dnvVurGrVxvHpaipynD1nYsMUmBJCeZgYlTi9muXh+pgXk+yDOc24jH +hrpfC7QAJKO+4qVdst2I3zCLUxifkpzqeL2557GkBI0cma6VbQYlahqLaVsMeE8T +FDp3Kz4wmvuF5uazAgMBAAECggGAEh1mHNp5mZZ6fqUmgfZ1KCmaCFRmf63bLSqa +TSzFEuMtFAO6S5J227h7w0AKvULwx7qNcHuPUbCzsULFwD0GB7uK9+0URqOv3TE9 +uar63ZF6hz2oZMDo8mFi8Xr+WRJUz5lNDQrMi0w0sOS4gb9xg0uQaQGkLVX+JgXj +La6H9OasMNJLdCinokHz2SDmwY9VDl19TyQxVtQL9meitsAaQoJSGPMV5p2y5d00 +1ZmF7NxRsZQYsXxO4kwl8WGQwkttIrtWFYC75hk/Z4kHjc1qG5NJ1F3BW5ANcwDQ +lYg8MAqbyEof2ER2DeaCTbDDJlXdgPGco8BZC+2ba0B8E51xtKosdQRJiezMYy5J +gIVn79CLjVEBGkLUxZZUwD/iBVumqFkiJyIbih7oUseJJca1aTRKmIDiZAhg2p81 +cwKqDErHHLNg3L9fXTMLOysfp35/6FGxBHqzqIgXcXyzJZ/dl/2v17ZBePmgovLg +pQ5ke8Wo7qRv3WJa0FthcUsbx+qZAoHBAP8He7m86f/NXpybBIOAIpmXm/gbrwpC +qg3rm7xeiL1tCVVXZ2MHQ41rjrgdSdP9QKrGMn07Pjc4tt1bqzLvHV18oo64fzXy 
+8kdoluxKSfUCNW9G85PtJrRhpAvSnoto4j9kRNDtJ4kUWPqE7mliwrcktx9OuEqE +RpqjfzCrMHzZea29sR6o87vfRX6dHTR9GwsMzQHF8bYoUHMXdstuiLyKrmXsz/Ra ++q5MwrbfraCv9OWDDgs/Qk+YNaI5E0IUNQKBwQC5mIzkIdGMCGZ7BP0OdKbXnMoJ +B65GyUcqlBxKpF3CFlIvQeUFh5GEfydfxnJGSwovdk152L6RVgGGOTO0/7Ej4Rg5 +yjBNl9aDjbWonv2Vjtxm1FQR8jBXnhokpsgS7RmTQWKMU3TPH/cNOez+VBj44lvp +w9ckW6EipnayPZuDQXE5QY5oYKy24GTDCVmIRoFMF6XPeS/KerIsysJTcZco3lMO +nWGwAe2R+G9XXq/tgHGgN9h37a7UTf9ALuOXnEcCgcAbai2FsOYipmwGP6/DhxGx +GxgcGrW9T59CMdKi9DKU0lTPhL7LaWt8l1RXPGbEUBQUh4vD5ItymjkmIIWNyyCH +/S7oUrLyFLSwsnCO5AmBOgSOer0SaMrhVyGwV6rNZ6/yio/POb8nQDW0cHfEgmZW +E69PwUGUWRXR58NzcuOaeDJZV+vjVNwmlQC+dJtAGja/AFhFWYb7QugrBxmxEqfG +RM4sjMFqDiGmfP/tcqwSevfDeEwZL2qsbbtOPf5w+wUCgcAtQcrRcoGzoPTEeNHw +bXelyiDmFM5lin1lH5rKhMwsIN9HkMz1DTrp0UvbqfuBspi4PCPmW3kU3aEfhuFZ ++KPMeP48UVZ4BVeU2sB6btKtXpnWJV6exa0OIIqFd3oAS3raEq6iQ1OPkl7fBcoJ +tp4kSqZZGZ1jy0g+t9Ln4egDGLkwWhEM2M4lBhDsEmKXvYGX+YhAUG/b8xFxpLvA +N0nB+HzOaohAsCerWaZk6r0BsDmE9Tk+/WGNebuNfiGXfc8CgcBb25p6CR73MfUi +np7aZeW/LdkvzxfQ8yms3Nhrl3mdmH1Ass9cH4sGlKAGYKwPWciE6lHycycSOVpq +sjvoZrxvUQ1Y4rV0Rs8vayLPOs4BcdJeHXKFVeevaCgRQnuMMdIIzvup6E5IJc2v +lcKn22Y/Ac3qpOcDloCHm5j17UeGzTTYMcC1p77QU+Y87f7C83m8N99R5iD6kiGN +a0KBzHlkpHClTVMxioVIGNv3dmcc7tjxJZ9jhHC59nE/HIOyrxM= +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/taf/tests/data/keystores/keystore_delegations/new_role1.pub b/taf/tests/data/keystores/keystore_delegations/new_role1.pub new file mode 100644 index 000000000..898b4f135 --- /dev/null +++ b/taf/tests/data/keystores/keystore_delegations/new_role1.pub @@ -0,0 +1,11 @@ +-----BEGIN PUBLIC KEY----- +MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAuORhNgvRwRbtldqmD0S9 +PmzuLP1cnoi0pe09gmAD0FOQflZy4APsoYYPOCr6gplCP9gElhuAd+1ldzAAwgjI +LGWxwWhEFdjJ7r2nhtNoXAgjHRJx4f2YjRIIpXRfDPwMW04MxZXudh9WM3zOdhcs +pJ93CEFF7fxkJunrpkhjVNOrTDWYW7QUsEj2/ifxvscNL0RIbXX9967HNFULsnJ2 +/nfe9Y/DJdHkUDiP9bpelcIlJ2wYn1tvTIzT2JbzRrIKLgTQyx1DdB5kSADugYdh 
+3JaSkH28qA67zFgm9RAgMyfPkrVzk1mKP/z6wYkbEy+vyyueXCVcwnGW+RZMlw8d +DU1ZJrYPKyAvxioyOHNxxYM1to1m9LDvAUc96dnvVurGrVxvHpaipynD1nYsMUmB +JCeZgYlTi9muXh+pgXk+yDOc24jHhrpfC7QAJKO+4qVdst2I3zCLUxifkpzqeL25 +57GkBI0cma6VbQYlahqLaVsMeE8TFDp3Kz4wmvuF5uazAgMBAAE= +-----END PUBLIC KEY----- \ No newline at end of file diff --git a/taf/tests/data/keystores/keystore_delegations/new_role2 b/taf/tests/data/keystores/keystore_delegations/new_role2 new file mode 100644 index 000000000..a0736ce74 --- /dev/null +++ b/taf/tests/data/keystores/keystore_delegations/new_role2 @@ -0,0 +1,39 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIG4wIBAAKCAYEAmvqUhn3+ZEswkaIjgSMdCVnw+/h6CFxpVJmIelmxWYXEhuJi +mELHWDExC2GQzziPpqo9ITTQBf5E8P76lxDkeifMUYsMCLOenMrdkdThXkv1tjSM +CyTJ7betkJw9zMdWabAfClUuRQWNgTa2pbieSYHqBXJMyWZqCR2H8mTQ+mEmln7y +lKOWjXMBBYLCtc2ElIAgXV8A4C01kq+xTsXKxzYlLRtOfH+pRvcOLDESFQ2li9Dl +czltKN5mRAJqopTBW7Ia9xOwmuNfzspJosyNKMrGHAtHf4uOB3oEoCac46QiIi81 +Pl4XR5ZLsxmp+HFHnBcIvGlfzZdgKxNK7w0409NI5Vgb/p+c3YnDnv+qQ2DnH2kN +hsvVTIB80Xw9QTvUK9dcwkj/EXleYDiZc2Nb7E/w76CunIqSRskdPL4nl3stTB09 +RYL+VZQxwMLriHxkqIm50z6MnK5hxUv6q6DvUG0rBg91CUYTDLzdQoBjKRne4Wrb +FfUljcvSaqgQrU6HAgMBAAECggGAKIIa1FSWa8yjc014DkcJTepubM3zx7+v4GcJ +H0HWc1ndlowRzU6XIFwrP5hO63sTQTL6K3XMceSWTI+5HEdUEQHaC+5WROf+K2lz +JK0KA0XDgc6WVEtXZIVAHq5YEPCBi7p3QpIlN/FNnVqZvxNUfE4yxx2rKHFWge9w +G1Fyth8yoN+ptGRV8779o10cW7zOTKp4yy8L4YyvlhnKNJbKe+uRKAsdJrmPm3b0 +A2UIHuykOrltYALAn5ASFvCywrAyufS8BYDe195u0QR4GypaFWJvgQonLXW65IVM +4tJes1z0exZ0cTUpJ+BBiK77ILeZTKBFmjd9Br+PwoiU2ICu++3no3yE6Fk/YnSk +yyj8jKnKadNgCMwHPuR3A5rIWsEixmHul68d8CTTRpzMBpoZm7ngNiW5aI7sAF87 +rT8ieCW8tlvcZlwvV928ejemGXn0JVH1UwpzkwAor1jtA++hb1k63/gmkKgFVloN +j7ScNQ8hLbL0Op9ukHVgiXqABMQxAoHBANAiMu6/jbE2HFvJsLJ9q/tT2fEA9RRR +RbSOqh/mJzrvLsRQUtsdxXhtkMJO38UUynrtMeDem6n9wNjVCJD6rGc6Xbs/R2Ua +EbDFPr4BzaPn6hD+0RDD/rxNSLLA36VhaFhAe+IFb900IfgHsuQoL8gejET+LUZk ++RtzBZqI1z08EoFfh7FYmvlnN9CeRJ5xM7NtzI0OXOg9eCQEFKj861xIreAbXuJV +mzjl6TbeZVXGBvnYNjTrQLNudzpy5748/QKBwQC+nuk5e51bebHO1Q/nGG7Tbk0b 
+NEhxEoZXCollJ0zVoqGHnJjs5XswqoR3+Qf0u5h5haAQR60uQv8GUeAyDYjrA871 +T33g5PvFgB05+YhAVKCV0NFOXesGBvBzOztVO6Cl0ZDmCW5HB4YP5ceiHoFyijl5 ++e/7TxsbkPkcFXTT86bZy+zXI2YE+R0Wm8kAOHqVyo8jdeDlQQFPPbsTo064to2w +Wr9fPdnbrgGtsFbuB0OVIwJoh+shnwH05Gg4UtMCgcEAxmsz/yOiYwTg+ChJSYBB +SrJfnUB6ZEoul7lCOnLhh2+qOAETXEz/ipV5YaRr86ikd5hU6rmN0PtWs+Az8HLp +lOexn+btm1bE8q6359A0SUO4g0dJ7B/NY5qR6cex7in0nd2rvIfOYyVmFNzSEGy3 +UKK+uq9OXkO4sBBxkSdPetMgGTIHXGzKIWXjcgDQDfSBg1bzoK3GqKihNkSlpYyo +nCu1h2bQiBlwh0e3k1VlaeYFlH4o/z4fSm/PPmt4voXJAoHAfAXImK3k4+950Kiv +gBxVfxr08A5EU81JurgQTNAVHaqCjklE9l0YmcFYDvboRkMIIYjfa7g25TKR2vrK +c8Z6nu4LaXAe5oQVi5qfaWkBTVnCYbdLd0GD+JfrOg3/vKTfEQQY0pKwPWaXwyAt +kz1l27AzVTlY+pmteXIJokwThxOwK2SS5CcT6YhrdJpHXO1iVLNGDjxT5tU0lOoF +HfHS9jtQVL22ZbFIXbYJQYjKBnSTdCUjG//S7D0YeM1jQcIhAoHANMhqZrkucALp +YEVxLG/vnrZoz0uc4Zpulsr3lXH0dOxOzAmhE4k1UBUImygIf7FNZeX23yMxi0VL +16EA89scd/R2b5OZ6G22uMNRbvtqOdWLKpLs1rQUnOaOP89vCCKVt/tQxu8++FWA +z98RFISRLmllcMa3jKIDxXuts/UI7dLChXuYNLjjvb2LY1/yYz6v9NinYpINq1AI +DWe9lCaBYzpCQX/1TtkHfQYBH6S/will9kTGcZXt5DP9qy4XBIfp +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/taf/tests/data/keystores/keystore_delegations/new_role2.pub b/taf/tests/data/keystores/keystore_delegations/new_role2.pub new file mode 100644 index 000000000..9574e51b3 --- /dev/null +++ b/taf/tests/data/keystores/keystore_delegations/new_role2.pub @@ -0,0 +1,11 @@ +-----BEGIN PUBLIC KEY----- +MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAmvqUhn3+ZEswkaIjgSMd +CVnw+/h6CFxpVJmIelmxWYXEhuJimELHWDExC2GQzziPpqo9ITTQBf5E8P76lxDk +eifMUYsMCLOenMrdkdThXkv1tjSMCyTJ7betkJw9zMdWabAfClUuRQWNgTa2pbie +SYHqBXJMyWZqCR2H8mTQ+mEmln7ylKOWjXMBBYLCtc2ElIAgXV8A4C01kq+xTsXK +xzYlLRtOfH+pRvcOLDESFQ2li9DlczltKN5mRAJqopTBW7Ia9xOwmuNfzspJosyN +KMrGHAtHf4uOB3oEoCac46QiIi81Pl4XR5ZLsxmp+HFHnBcIvGlfzZdgKxNK7w04 +09NI5Vgb/p+c3YnDnv+qQ2DnH2kNhsvVTIB80Xw9QTvUK9dcwkj/EXleYDiZc2Nb +7E/w76CunIqSRskdPL4nl3stTB09RYL+VZQxwMLriHxkqIm50z6MnK5hxUv6q6Dv +UG0rBg91CUYTDLzdQoBjKRne4WrbFfUljcvSaqgQrU6HAgMBAAE= 
+-----END PUBLIC KEY----- \ No newline at end of file diff --git a/taf/tests/test_api/repo/cli/test_repo_create_cmd.py b/taf/tests/test_api/repo/cli/test_repo_create_cmd.py index e429877e1..c3e680cb0 100644 --- a/taf/tests/test_api/repo/cli/test_repo_create_cmd.py +++ b/taf/tests/test_api/repo/cli/test_repo_create_cmd.py @@ -1,3 +1,4 @@ +import os from pathlib import Path from click.testing import CliRunner @@ -15,7 +16,7 @@ def test_repo_create_cmd_expect_success( [ "repo", "create", - ".\\test-law\\", + "test/law", "--keys-description", f"{str(with_delegations_no_yubikeys_path)}", "--keystore", @@ -32,11 +33,8 @@ def test_repo_create_cmd_expect_success( assert "Finished creating a new repository" in output cwd = Path.cwd() - assert (cwd / "test-law" / "metadata").exists() - assert (cwd / "test-law" / "targets").exists() - # TODO: actually have this. hopefully once issue is resolved error should get removed from assert - assert "An error occurred while signing target files" in output - assert "An error occurred while creating a new repository" in output + assert (cwd / "test/law" / "metadata").exists() + assert (cwd / "test/law" / "targets").exists() def test_repo_create_cmd_when_repo_already_created_expect_error( @@ -49,7 +47,7 @@ def test_repo_create_cmd_when_repo_already_created_expect_error( [ "repo", "create", - ".\\test-law\\", + "test/law", "--keys-description", f"{str(with_delegations_no_yubikeys_path)}", "--keystore", @@ -59,8 +57,8 @@ def test_repo_create_cmd_when_repo_already_created_expect_error( ], ) cwd = Path.cwd() - assert (cwd / "test-law" / "metadata").exists() - assert (cwd / "test-law" / "targets").exists() + assert (cwd / "test/law" / "metadata").exists() + assert (cwd / "test/law" / "targets").exists() output = caplog.text assert "Finished creating a new repository" in output @@ -70,7 +68,7 @@ def test_repo_create_cmd_when_repo_already_created_expect_error( [ "repo", "create", - ".\\test-law\\", + "test/law", "--keys-description", 
f"{str(with_delegations_no_yubikeys_path)}", "--keystore", @@ -79,8 +77,7 @@ def test_repo_create_cmd_when_repo_already_created_expect_error( "--test", ], ) - # TODO: expected to have this output, instead get same error as first test assert ( - '"test-law" is a git repository containing the metadata directory. Generating new metadata files could make the repository invalid. Aborting' + f'Metadata directory found inside "test{os.sep}law". Recreate metadata files? [y/N]' in result.output ) diff --git a/taf/tests/test_api/roles/cli/test_roles_cmds.py b/taf/tests/test_api/roles/cli/test_roles_cmds.py index ac0223bc5..3d5804f00 100644 --- a/taf/tests/test_api/roles/cli/test_roles_cmds.py +++ b/taf/tests/test_api/roles/cli/test_roles_cmds.py @@ -21,7 +21,7 @@ def test_roles_add_cmd_expect_success(auth_repo_with_delegations, roles_keystore "/delegated_path_inside_targets1", "/delegated_path_inside_targets2", ], - "keys_number": 1, + "keys_number": 2, "threshold": 1, "yubikey": False, "scheme": "rsa-pkcs1v15-sha256", @@ -34,7 +34,7 @@ def test_roles_add_cmd_expect_success(auth_repo_with_delegations, roles_keystore [ "roles", "add", - "rolename1", + "new_role", "--path", f"{str(auth_repo_with_delegations.path)}", "--config-file", @@ -43,10 +43,9 @@ def test_roles_add_cmd_expect_success(auth_repo_with_delegations, roles_keystore f"{str(roles_keystore)}", ], ) - # TODO: there seems to be an assertion error here check_new_role( auth_repo_with_delegations, - "rolename1", + "new_role", ["/delegated_path_inside_targets1", "/delegated_path_inside_targets2"], str(roles_keystore), "targets", @@ -185,7 +184,10 @@ def test_rotate_key_cmd_expect_success(auth_repo, roles_keystore): f"{pub_key_path}", "--keystore", f"{str(roles_keystore)}", - "--no-commit", + "--revoke-commit-msg", + "Remove targets key", + "--add-commit-msg", + "Add signing key", ], ) new_targets_keyids = auth_repo.get_keyids_of_role("targets") diff --git a/taf/tests/tuf/test_create_edit_repo/test_update.py 
b/taf/tests/tuf/test_create_edit_repo/test_update.py index 5fe044650..f7eddded8 100644 --- a/taf/tests/tuf/test_create_edit_repo/test_update.py +++ b/taf/tests/tuf/test_create_edit_repo/test_update.py @@ -235,6 +235,7 @@ def test_add_metadata_keys(tuf_repo, signers_with_delegations, public_keys): def test_revoke_metadata_key( tuf_repo, signers_with_delegations, public_keys_with_delegations, public_keys ): + tuf_repo.add_signers_to_cache(signers_with_delegations) targets_key1 = public_keys_with_delegations["targets"][0] targets_key2 = public_keys_with_delegations["targets"][1] diff --git a/taf/tools/roles/__init__.py b/taf/tools/roles/__init__.py index 56c297295..7d887fddd 100644 --- a/taf/tools/roles/__init__.py +++ b/taf/tools/roles/__init__.py @@ -327,18 +327,20 @@ def rotate_signing_key_command(): @click.option("--pub-key-path", default=None, help="Path to the public key corresponding to the private key which should be registered as the role's signing key") @click.option("--keystore", default=None, help="Location of the keystore files") @click.option("--scheme", default=DEFAULT_RSA_SIGNATURE_SCHEME, help="A signature scheme used for signing") - @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") - def rotate_key(path, role, keyid, pub_key_path, keystore, scheme, no_commit, prompt_for_keys): + @click.option("--revoke-commit-msg", default=None, help="Revoke key commit message") + @click.option("--add-commit-msg", default=None, help="Add new signing key commit message") + def rotate_key(path, role, keyid, pub_key_path, keystore, scheme, prompt_for_keys, revoke_commit_msg, add_commit_msg): rotate_signing_key( path=path, roles=role, key_id=keyid, keystore=keystore, scheme=scheme, - commit=not no_commit, 
prompt_for_keys=prompt_for_keys, pub_key_path=pub_key_path, + revoke_commit_msg=revoke_commit_msg, + add_commit_msg=add_commit_msg, ) return rotate_key diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 2ce9753ab..aaf2f8f69 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -201,7 +201,8 @@ def calculate_length(self, md: Metadata) -> int: def add_signers_to_cache(self, roles_signers: Dict): for role, signers in roles_signers.items(): - self._load_role_signers(role, signers) + if self._role_obj(role): + self._load_role_signers(role, signers) def all_target_files(self) -> Set: """ From f496fe98c1a0a03bcc7c264d6768f989c486add2 Mon Sep 17 00:00:00 2001 From: Renata Date: Mon, 23 Dec 2024 21:09:13 -0500 Subject: [PATCH 12/26] chore: formatting --- taf/yubikey.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/taf/yubikey.py b/taf/yubikey.py index 25789d69e..36825e0f1 100644 --- a/taf/yubikey.py +++ b/taf/yubikey.py @@ -236,9 +236,7 @@ def export_yk_certificate(certs_dir, key: SSlibKey): @raise_yubikey_err("Cannot get public key in TUF format.") -def get_piv_public_key_tuf( - scheme=DEFAULT_RSA_SIGNATURE_SCHEME -) -> SSlibKey: +def get_piv_public_key_tuf(scheme=DEFAULT_RSA_SIGNATURE_SCHEME) -> SSlibKey: """Return public key from a Yubikey in TUF's RSAKEY_SCHEMA format. Args: @@ -257,6 +255,7 @@ def get_piv_public_key_tuf( pub_key_pem = export_piv_pub_key().decode("utf-8") return get_sslib_key_from_value(pub_key_pem, scheme) + @raise_yubikey_err("Cannot sign data.") def sign_piv_rsa_pkcs1v15(data, pin): """Sign data with key from YubiKey's piv slot. 
From 019d963f4d040fe2fe69a0eb8e78128a39ca719c Mon Sep 17 00:00:00 2001 From: Renata Date: Tue, 24 Dec 2024 14:32:03 -0500 Subject: [PATCH 13/26] fix, tests: minor update metadata fix and additional tests --- taf/api/metadata.py | 7 +- .../test_api/metadata/api/test_metadata.py | 70 ++++++++++++++++++- 2 files changed, 72 insertions(+), 5 deletions(-) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index 353411865..fdfdb67fc 100644 --- a/taf/api/metadata.py +++ b/taf/api/metadata.py @@ -136,8 +136,6 @@ def update_metadata_expiration_date( if len(roles) == 1 and Timestamp.type in roles: update_snapshot_and_timestamp = False - if Timestamp.type in roles and Snapshot.type in roles: - update_snapshot_and_timestamp = True update_snapshot_expiration_date = Snapshot.type in roles update_timestamp_expiration_date = Timestamp.type in roles @@ -167,4 +165,7 @@ def update_metadata_expiration_date( auth_repo.clear_open_metadata() if update_snapshot_and_timestamp: - auth_repo.update_snapshot_and_timestamp() + if len(roles) == 1 and Snapshot.type in roles: + auth_repo.do_timestamp() + else: + auth_repo.update_snapshot_and_timestamp() diff --git a/taf/tests/test_api/metadata/api/test_metadata.py b/taf/tests/test_api/metadata/api/test_metadata.py index b63bb98f2..a530c8d02 100644 --- a/taf/tests/test_api/metadata/api/test_metadata.py +++ b/taf/tests/test_api/metadata/api/test_metadata.py @@ -61,6 +61,8 @@ def test_update_root_metadata( initial_commits_num = len(auth_repo.list_commits()) roles = [Root.type] INTERVAL = 180 + timestamp_version = auth_repo_expired.timestamp().version + snapshot_version = auth_repo_expired.snapshot().version update_metadata_expiration_date( path=auth_repo_path, roles=roles, @@ -80,6 +82,8 @@ def test_update_root_metadata( for role in (Targets.type, "delegated_role", "inner_role"): actual_expiration = auth_repo.get_expiration_date(role) assert actual_expiration < now + assert auth_repo_expired.timestamp().version == timestamp_version + 1 + assert 
auth_repo_expired.snapshot().version == snapshot_version + 1 @@ -106,6 +110,64 @@ def test_check_expiration_date_when_expired_and_will_expire( assert Root.type in will_expire +@freeze_time("2023-01-01") +def test_update_snapshot_metadata( + auth_repo_expired: AuthenticationRepository, keystore_delegations: str +): + # update snapshot metadata, expect timestamp to be updated too + # targets should not be updated + auth_repo_path = auth_repo_expired.path + auth_repo = AuthenticationRepository(path=auth_repo_path) + initial_commits_num = len(auth_repo.list_commits()) + roles = [Snapshot.type] + INTERVAL = 7 + timestamp_version = auth_repo_expired.timestamp().version + snapshot_version = auth_repo_expired.snapshot().version + update_metadata_expiration_date( + path=auth_repo_path, + roles=roles, + interval=INTERVAL, + keystore=keystore_delegations, + push=False, + ) + commits = auth_repo.list_commits() + assert len(commits) == initial_commits_num + 1 + assert commits[0].message.strip() == git_commit_message( + "update-expiration-dates", roles=",".join(roles) + ) + assert auth_repo_expired.timestamp().version == timestamp_version + 1 + assert auth_repo_expired.snapshot().version == snapshot_version + 1 + + +@freeze_time("2023-01-01") +def test_update_timestamp_metadata( + auth_repo_expired: AuthenticationRepository, keystore_delegations: str +): + # update timestamp metadata, expect only timestamp to be updated + # snapshot and targets should not be updated + auth_repo_path = auth_repo_expired.path + auth_repo = AuthenticationRepository(path=auth_repo_path) + initial_commits_num = len(auth_repo.list_commits()) + roles = [Timestamp.type] + INTERVAL = 1 + timestamp_version = auth_repo_expired.timestamp().version + snapshot_version = auth_repo_expired.snapshot().version + update_metadata_expiration_date( + path=auth_repo_path, + roles=roles, + interval=INTERVAL, + keystore=keystore_delegations, + push=False, + ) + commits = 
auth_repo.list_commits() + assert len(commits) == initial_commits_num + 1 + assert commits[0].message.strip() == git_commit_message( + "update-expiration-dates", roles=",".join(roles) + ) + assert auth_repo_expired.timestamp().version == timestamp_version + 1 + assert auth_repo_expired.snapshot().version == snapshot_version + + @freeze_time("2023-01-01") def test_update_multiple_roles_metadata( auth_repo_expired: AuthenticationRepository, keystore_delegations: str @@ -117,6 +179,8 @@ def test_update_multiple_roles_metadata( initial_commits_num = len(auth_repo.list_commits()) roles = [Targets.type, "delegated_role", "inner_role"] INTERVAL = 365 + timestamp_version = auth_repo_expired.timestamp().version + snapshot_version = auth_repo_expired.snapshot().version update_metadata_expiration_date( path=auth_repo_path, roles=roles, @@ -133,6 +197,8 @@ def test_update_multiple_roles_metadata( expected_expiration = _get_date(INTERVAL) actual_expiration = auth_repo.get_expiration_date(role) assert expected_expiration == actual_expiration + assert auth_repo_expired.timestamp().version == timestamp_version + 1 + assert auth_repo_expired.snapshot().version == snapshot_version + 1 @freeze_time("2023-01-01") @@ -143,8 +209,8 @@ def test_check_expiration_date_when_no_expired( expired, will_expire = check_expiration_dates( auth_repo_path, interval=90, print_output=False ) - assert len(expired) == 2 - assert not len(will_expire) + assert not len(expired) + assert len(will_expire) == 2 def _check_expired_role( From 503653b8f67a2f038b24a9bb7914aeca476683a8 Mon Sep 17 00:00:00 2001 From: Renata Date: Tue, 24 Dec 2024 19:11:27 -0500 Subject: [PATCH 14/26] fix: do not update snapshot and timestamp twice --- taf/api/metadata.py | 31 ++++++++++++++++--------------- taf/tuf/repository.py | 11 ++++++++++- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index fdfdb67fc..ab94eadbe 100644 --- a/taf/api/metadata.py +++ 
b/taf/api/metadata.py @@ -94,6 +94,7 @@ def update_metadata_expiration_date( scheme: Optional[str] = DEFAULT_RSA_SIGNATURE_SCHEME, start_date: Optional[datetime] = None, commit: Optional[bool] = True, + commit_msg: Optional[str] = None, prompt_for_keys: Optional[bool] = False, push: Optional[bool] = True, update_snapshot_and_timestamp: Optional[bool] = True, @@ -113,6 +114,7 @@ def update_metadata_expiration_date( start_date (optional): Date to which expiration interval is added. Set to today if not specified. commit (optional): Indicates if the changes should be committed and pushed automatically. + commit_msg (optional): Custom commit messages. prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. push (optional): Flag specifying whether to push to remote @@ -128,15 +130,14 @@ def update_metadata_expiration_date( if start_date is None: start_date = datetime.now() - commit_msg = git_commit_message("update-expiration-dates", roles=",".join(roles)) + commit_msg = commit_msg or git_commit_message( + "update-expiration-dates", roles=",".join(roles) + ) # update the order, snapshot has to be updated before timestamp # and all other roles have to be updated before snapshot # all other roles can be updated in any order - if len(roles) == 1 and Timestamp.type in roles: - update_snapshot_and_timestamp = False - update_snapshot_expiration_date = Snapshot.type in roles update_timestamp_expiration_date = Timestamp.type in roles @@ -151,21 +152,21 @@ def update_metadata_expiration_date( commit_msg=commit_msg, push=push, ): - if update_snapshot_and_timestamp: - if update_snapshot_expiration_date: - auth_repo.add_to_open_metadata(Snapshot.type) - if update_timestamp_expiration_date: - auth_repo.add_to_open_metadata(Timestamp.type) + if update_snapshot_expiration_date: + auth_repo.add_to_open_metadata([Snapshot.type]) + if update_timestamp_expiration_date: + auth_repo.add_to_open_metadata([Timestamp.type]) for 
role in roles: auth_repo.set_metadata_expiration_date( role, start_date=start_date, interval=interval ) - auth_repo.clear_open_metadata() + auth_repo.remove_from_open_metadata([Snapshot.type]) + # it is important to update snapshot first + if update_snapshot_expiration_date or update_snapshot_and_timestamp: + auth_repo.do_snapshot(force=True) - if update_snapshot_and_timestamp: - if len(roles) == 1 and Snapshot.type in roles: - auth_repo.do_timestamp() - else: - auth_repo.update_snapshot_and_timestamp() + auth_repo.remove_from_open_metadata([Timestamp.type]) + if update_timestamp_expiration_date or update_snapshot_and_timestamp: + auth_repo.do_timestamp(force=True) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index aaf2f8f69..621e1ab8f 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -342,7 +342,8 @@ def add_new_roles_to_snapshot(self, roles: List[str]) -> None: def add_to_open_metadata(self, roles: List[str]) -> None: """ In order to execute several methods before updating the metadata on disk, - these methods need to be added to the _metadata_to_keep_open list. + some metadata might need to be kept open, which is done by adding them to + _metadata_to_keep_open list. This method adds all roles from the provided list to _metadata_to_keep_open. 
""" self._metadata_to_keep_open.update(roles) @@ -1407,6 +1408,14 @@ def remove_delegated_paths(self, roles_paths: Dict[str, List[str]]): updated = True return updated + def remove_from_open_metadata(self, roles: List[str]) -> None: + """ + Removes the listed roles from metadata_to_keep_open list + """ + for role in roles: + if role in self._metadata_to_keep_open: + self._metadata_to_keep_open.remove(role) + def roles_targets_for_filenames(self, target_filenames): """Sort target files by roles Args: From 8ce54e0307c71ef3a3c0bd88295b099b71fdbff4 Mon Sep 17 00:00:00 2001 From: Renata Date: Tue, 24 Dec 2024 20:08:58 -0500 Subject: [PATCH 15/26] fix: fix update timestamp without snapshot --- taf/api/metadata.py | 5 ++++- taf/tuf/repository.py | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index ab94eadbe..e195e56e9 100644 --- a/taf/api/metadata.py +++ b/taf/api/metadata.py @@ -164,7 +164,10 @@ def update_metadata_expiration_date( auth_repo.remove_from_open_metadata([Snapshot.type]) # it is important to update snapshot first - if update_snapshot_expiration_date or update_snapshot_and_timestamp: + + if (update_snapshot_expiration_date or update_snapshot_and_timestamp) and not ( + len(roles) == 1 and update_timestamp_expiration_date + ): auth_repo.do_snapshot(force=True) auth_repo.remove_from_open_metadata([Timestamp.type]) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 621e1ab8f..a557dbb37 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -684,6 +684,10 @@ def delete_unregistered_target_files(self, targets_role="targets"): if file_rel_path not in self.get_targets_of_role(targets_role): (self.targets_path / file_rel_path).unlink() + def do_timestamp(self, force=False): + self._snapshot_info.version = self._signed_obj(Snapshot.type).version + return super().do_timestamp(force) + def find_delegated_roles_parent(self, delegated_role: str) -> Optional[str]: """ Find parent role 
of the specified delegated targets role From 95263f0750c0fb714f9a5448a2379c4cf68c5a31 Mon Sep 17 00:00:00 2001 From: Renata Date: Tue, 24 Dec 2024 22:21:49 -0500 Subject: [PATCH 16/26] feat, fix: add update timestamp and snapshot api function, minor fixes --- taf/api/metadata.py | 46 +++++++++++++++++ taf/api/targets.py | 2 +- taf/git.py | 2 +- taf/tools/yubikey/__init__.py | 80 +++++++++++++++++++++--------- taf/tools/yubikey/yubikey_utils.py | 5 +- 5 files changed, 106 insertions(+), 29 deletions(-) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index e195e56e9..127fb5c84 100644 --- a/taf/api/metadata.py +++ b/taf/api/metadata.py @@ -173,3 +173,49 @@ def update_metadata_expiration_date( auth_repo.remove_from_open_metadata([Timestamp.type]) if update_timestamp_expiration_date or update_snapshot_and_timestamp: auth_repo.do_timestamp(force=True) + + +@check_if_clean +def update_snapshot_and_timestamp( + path: str, + keystore: Optional[str] = None, + scheme: Optional[str] = DEFAULT_RSA_SIGNATURE_SCHEME, + commit: Optional[bool] = True, + commit_msg: Optional[str] = None, + prompt_for_keys: Optional[bool] = False, + push: Optional[bool] = True, +) -> None: + """ + Update expiration snapshot and timestamp + + Arguments: + path: Authentication repository's location. + keystore (optional): Keystore directory's path + scheme (optional): Signature scheme. + commit (optional): Indicates if the changes should be committed and pushed automatically. + commit_msg (optional): Custom commit messages. + prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. + push (optional): Flag specifying whether to push to remote + + Side Effects: + Updates metadata files, saves changes to disk and commits changes + unless no_commit is set to True. 
+ + Returns: + None + """ + + auth_repo = AuthenticationRepository(path=path) + + with manage_repo_and_signers( + auth_repo, + [], + keystore, + scheme, + prompt_for_keys, + load_snapshot_and_timestamp=True, + commit=commit, + commit_msg=commit_msg, + push=push, + ): + auth_repo.update_snapshot_and_timestamp() diff --git a/taf/api/targets.py b/taf/api/targets.py index 7a8dc77d6..de65b6d94 100644 --- a/taf/api/targets.py +++ b/taf/api/targets.py @@ -339,7 +339,7 @@ def register_target_files( keystore: Location of the keystore files. roles_key_infos: A dictionary whose keys are role names, while values contain information about the keys. scheme (optional): Signing scheme. Set to rsa-pkcs1v15-sha256 by default. - taf_repo (optional): If taf repository is already initialized, it can be passed and used. + auth_repo (optional): If auth repository is already initialized, it can be passed and used. write (optional): Write metadata updates to disk if set to True commit (optional): Indicates if the changes should be committed and pushed automatically. prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. 
diff --git a/taf/git.py b/taf/git.py index e63c8da3b..f2c6868a0 100644 --- a/taf/git.py +++ b/taf/git.py @@ -98,7 +98,7 @@ def __init__( self.path = self._validate_repo_path(path) self.alias = alias - self.urls = self._validate_urls(urls) + self.urls = self._validate_urls([str(url) for url in urls]) if urls else None self.allow_unsafe = allow_unsafe self.custom = custom or {} if default_branch is None: diff --git a/taf/tools/yubikey/__init__.py b/taf/tools/yubikey/__init__.py index 4704f66b8..ec720cf79 100644 --- a/taf/tools/yubikey/__init__.py +++ b/taf/tools/yubikey/__init__.py @@ -1,6 +1,12 @@ import click import json -from taf.api.yubikey import export_yk_certificate, export_yk_public_pem, get_yk_roles, setup_signing_yubikey, setup_test_yubikey +from taf.api.yubikey import ( + export_yk_certificate, + export_yk_public_pem, + get_yk_roles, + setup_signing_yubikey, + setup_test_yubikey, +) from taf.exceptions import YubikeyError from taf.repository_utils import find_valid_repository from taf.tools.cli import catch_cli_exception @@ -12,32 +18,42 @@ def check_pin_command(): def check_pin(pin): try: from taf.yubikey import is_valid_pin + valid, retries = is_valid_pin(pin) inserted = True except YubikeyError: valid = False inserted = False retries = None - print(json.dumps({ - "pin": valid, - "retries": retries, - "inserted": inserted - })) + print(json.dumps({"pin": valid, "retries": retries, "inserted": inserted})) + return check_pin def export_pub_key_command(): - @click.command(help="Export the inserted Yubikey's public key and save it to the specified location.") - @click.option("--output", help="File to which the exported public key will be written. The result will be written to the console if path is not specified") + @click.command( + help="Export the inserted Yubikey's public key and save it to the specified location." + ) + @click.option( + "--output", + help="File to which the exported public key will be written. 
The result will be written to the console if path is not specified", + ) def export_pub_key(output): export_yk_public_pem(output) + return export_pub_key def get_roles_command(): - @click.command(help="Export the inserted Yubikey's public key and save it to the specified location.") + @click.command( + help="Export the inserted Yubikey's public key and save it to the specified location." + ) @catch_cli_exception(handle=YubikeyError, print_error=True) - @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") + @click.option( + "--path", + default=".", + help="Authentication repository's location. If not specified, set to the current directory", + ) def get_roles(path): path = find_valid_repository(path) roles_with_paths = get_yk_roles(path) @@ -45,40 +61,56 @@ def get_roles(path): print(f"\n{role}") for path in paths: print(f"\n -{path}") + return get_roles def export_certificate_command(): - @click.command(help="Export the inserted Yubikey's public key and save it to the specified location.") - @click.option("--output", help="File to which the exported certificate key will be written. The result will be written to the user's home directory by default") + @click.command( + help="Export the inserted Yubikey's public key and save it to the specified location." + ) + @click.option( + "--output", + help="File to which the exported certificate key will be written. The result will be written to the user's home directory by default", + ) def export_certificate(output): export_yk_certificate(output) + return export_certificate def setup_signing_key_command(): - @click.command(help="""Generate a new key on the yubikey and set the pin. Export the generated certificate + @click.command( + help="""Generate a new key on the yubikey and set the pin. Export the generated certificate to the specified directory. 
- WARNING - this will delete everything from the inserted key.""") - @click.option("--certs-dir", help="Path of the directory where the exported certificate will be saved. Set to the user home directory by default") + WARNING - this will delete everything from the inserted key.""" + ) + @click.option( + "--certs-dir", + help="Path of the directory where the exported certificate will be saved. Set to the user home directory by default", + ) def setup_signing_key(certs_dir): - setup_signing_yubikey(certs_dir,key_size=2048) + setup_signing_yubikey(certs_dir, key_size=2048) + return setup_signing_key def setup_test_key_command(): - @click.command(help="""Copies the specified key onto the inserted YubiKey - WARNING - this will reset the inserted key.""") + @click.command( + help="""Copies the specified key onto the inserted YubiKey + WARNING - this will reset the inserted key.""" + ) @click.argument("key-path") def setup_test_key(key_path): setup_test_yubikey(key_path) + return setup_test_key def attach_to_group(group): - group.add_command(check_pin_command(), name='check-pin') - group.add_command(export_pub_key_command(), name='export-pub-key') - group.add_command(get_roles_command(), name='get-roles') - group.add_command(export_certificate_command(), name='export-certificate') - group.add_command(setup_signing_key_command(), name='setup-signing-key') - group.add_command(setup_test_key_command(), name='setup-test-key') + group.add_command(check_pin_command(), name="check-pin") + group.add_command(export_pub_key_command(), name="export-pub-key") + group.add_command(get_roles_command(), name="get-roles") + group.add_command(export_certificate_command(), name="export-certificate") + group.add_command(setup_signing_key_command(), name="setup-signing-key") + group.add_command(setup_test_key_command(), name="setup-test-key") diff --git a/taf/tools/yubikey/yubikey_utils.py b/taf/tools/yubikey/yubikey_utils.py index 87bd79b9f..ab588535a 100644 --- 
a/taf/tools/yubikey/yubikey_utils.py +++ b/taf/tools/yubikey/yubikey_utils.py @@ -5,8 +5,7 @@ from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization -from securesystemslib.rsa_keys import create_rsa_signature -from tuf.repository_tool import import_rsakey_from_pem +from taf.tuf.keys import load_signer_from_file VALID_PIN = "123456" WRONG_PIN = "111111" @@ -30,7 +29,7 @@ def __init__(self, priv_key_path, pub_key_path, scheme, serial=None, pin=VALID_P self.pub_key_pem, default_backend() ) - self.tuf_key = import_rsakey_from_pem(self.pub_key_pem.decode("utf-8"), scheme) + self.tuf_key = load_signer_from_file(priv_key_path) @property def driver(self): From 408f9bda77c0a4619b27f7c62e608ab1ae0ac65d Mon Sep 17 00:00:00 2001 From: n-dusan Date: Thu, 26 Dec 2024 00:27:31 +0000 Subject: [PATCH 17/26] feat: introduce get_role_paths back to auth_repo.py It was a convenience method used to figure out which paths from repositories.json match which role. Early exit function if repo is bare (to signal that it's currently not supported) --- taf/auth_repo.py | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/taf/auth_repo.py b/taf/auth_repo.py index 54842cc55..954de94f4 100644 --- a/taf/auth_repo.py +++ b/taf/auth_repo.py @@ -261,6 +261,38 @@ def get_info_json( def get_metadata_path(self, role): return self.path / METADATA_DIRECTORY_NAME / f"{role}.json" + def get_role_repositories(self, role, parent_role=None): + """Get repositories of the given role + + Args: + - role(str): TUF role (root, targets, timestamp, snapshot or delegated one) + - parent_role(str): Name of the parent role of the delegated role. If not specified, + it will be set automatically, but this might be slow if there + are many delegations. 
+ + Returns: + Repositories' path from repositories.json that matches given role paths + + Raises: + - securesystemslib.exceptions.FormatError: If the arguments are improperly formatted. + - securesystemslib.exceptions.UnknownRoleError: If 'rolename' has not been delegated by this + """ + if self.is_bare_repository: + # raise an error for now + # once we have an ergonomic way to get repositories from a bare repository, remove the error + raise Exception( + "Getting role repositories from a bare repository is not yet supported." + ) + + role_paths = self._tuf_repository.get_role_paths(role) + + target_repositories = self._get_target_repositories_from_disk() + return [ + repo + for repo in target_repositories + if any([fnmatch.fnmatch(repo, path) for path in role_paths]) + ] + def is_commit_authenticated(self, target_name: str, commit: str) -> bool: """Checks if passed commit is ever authenticated for given target name.""" for auth_commit in self.all_commits_on_branch(reverse=False): @@ -568,3 +600,13 @@ def targets_at_revisions( "custom": target_content, } return targets + + def _get_target_repositories_from_disk(self): + """ + Read repositories.json from disk and return the list of target repositories + """ + repositories_path = self.targets_path / "repositories.json" + if repositories_path.exists(): + repositories = repositories_path.read_text() + repositories = json.loads(repositories)["repositories"] + return [str(Path(target_path).as_posix()) for target_path in repositories] From 061a8605c1bb84f0262ca8118643c7d35ca5d782 Mon Sep 17 00:00:00 2001 From: Renata Date: Wed, 25 Dec 2024 23:38:44 -0500 Subject: [PATCH 18/26] feat: add create/remove target files as a separate repository function --- taf/tuf/repository.py | 65 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 52 insertions(+), 13 deletions(-) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index a557dbb37..6e80b6345 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ 
-657,6 +657,56 @@ def create_delegated_roles( added_roles.append(role_data.name) return added_roles, existing_roles + def create_and_remove_target_files( + self, added_data: Optional[Dict] = None, removed_data: Optional[Dict] = None + ) -> None: + """Create/updates/removes files in the targets directory + Args: + - added_data(dict): Dictionary of new data whose keys are target paths of repositories + (as specified in targets.json, relative to the targets dictionary). + The values are of form: + { + target: content of the target file + } + - removed_data(dict): Dictionary of the old data whose keys are target paths of + repositories + (as specified in targets.json, relative to the targets dictionary). + The values are not needed. This is just for consistency. + + Content of the target file can be a dictionary, in which case a json file will be created. + If that is not the case, an ordinary textual file will be created. + If content is not specified and the file already exists, it will not be modified. + If it does not exist, an empty file will be created. 
To replace an existing file with an + empty file, specify empty content (target: '') + + Returns: + - Role whose targets were updates + """ + added_data = {} if added_data is None else added_data + removed_data = {} if removed_data is None else removed_data + data = dict(added_data, **removed_data) + if not data: + raise TargetsError("Nothing to be modified!") + + added_paths = [] + for path, target_data in added_data.items(): + target_path = (self.targets_path / path).absolute() + self._create_target_file(target_path, target_data) + added_paths.append(target_path) + + # remove existing target files + removed_paths = [] + for path in removed_data.keys(): + target_path = (self.targets_path / path).absolute() + if target_path.exists(): + if target_path.is_file(): + target_path.unlink() + elif target_path.is_dir(): + shutil.rmtree(target_path, onerror=on_rm_error) + removed_paths.append(str(path)) + + return added_paths, removed_paths + def _create_target_object( self, filesystem_path: str, target_path: str, custom: Optional[Dict] ) -> TargetFile: @@ -1284,26 +1334,15 @@ def modify_targets( raise TargetsError( f"Could not find a common role for target paths:\n{'-'.join(target_paths)}" ) - # add new target files + _, removed_paths = self.create_and_remove_target_files(added_data, removed_data) + target_files = [] for path, target_data in added_data.items(): target_path = (self.targets_path / path).absolute() - self._create_target_file(target_path, target_data) custom = target_data.get("custom", None) target_file = self._create_target_object(target_path, path, custom) target_files.append(target_file) - # remove existing target files - removed_paths = [] - for path in removed_data.keys(): - target_path = (self.targets_path / path).absolute() - if target_path.exists(): - if target_path.is_file(): - target_path.unlink() - elif target_path.is_dir(): - shutil.rmtree(target_path, onerror=on_rm_error) - removed_paths.append(str(path)) - targets_role = self._modify_tarets_role( 
target_files, removed_paths, targets_role ) From 8f1b2e953d682875ad1e26a9b22cabb5aa1c2bd2 Mon Sep 17 00:00:00 2001 From: Renata Date: Thu, 26 Dec 2024 00:09:58 -0500 Subject: [PATCH 19/26] refact: update fake yubikey --- taf/tools/yubikey/yubikey_utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/taf/tools/yubikey/yubikey_utils.py b/taf/tools/yubikey/yubikey_utils.py index ab588535a..ffbf6cbdc 100644 --- a/taf/tools/yubikey/yubikey_utils.py +++ b/taf/tools/yubikey/yubikey_utils.py @@ -111,10 +111,8 @@ def sign(self, slot, key_type, data, hash, padding): if isinstance(data, str): data = data.encode("utf-8") - sig, _ = create_rsa_signature( - self._driver.priv_key_pem.decode("utf-8"), data, self._driver.scheme - ) - return sig + signature = self._driver.priv_key.sign(data, padding, hash) + return signature def verify_pin(self, pin): if self._driver.pin != pin: From af0af2e198342438865d186cad355f445ee7c303 Mon Sep 17 00:00:00 2001 From: Renata Date: Thu, 26 Dec 2024 00:14:02 -0500 Subject: [PATCH 20/26] chore: mypy return type fix --- taf/tuf/repository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 6e80b6345..e82cd805d 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -659,7 +659,7 @@ def create_delegated_roles( def create_and_remove_target_files( self, added_data: Optional[Dict] = None, removed_data: Optional[Dict] = None - ) -> None: + ) -> Tuple: """Create/updates/removes files in the targets directory Args: - added_data(dict): Dictionary of new data whose keys are target paths of repositories From 19ebb235ad3a28d8d31bb641b2b1969dca6bde64 Mon Sep 17 00:00:00 2001 From: Renata Date: Thu, 26 Dec 2024 13:33:34 -0500 Subject: [PATCH 21/26] fix: set storage commit to None in repository at revision context manager --- taf/auth_repo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/taf/auth_repo.py b/taf/auth_repo.py index 
954de94f4..5d7af21f2 100644 --- a/taf/auth_repo.py +++ b/taf/auth_repo.py @@ -314,7 +314,7 @@ def repository_at_revision(self, commit: str): """ self._storage.commit = commit yield - self._storage.commit = self.head_commit_sha() + self._storage.commit = None def set_last_validated_data( self, From 61e678826c268b6a743a052351f40aa3ceca3842 Mon Sep 17 00:00:00 2001 From: Renata Date: Sun, 29 Dec 2024 01:01:15 -0500 Subject: [PATCH 22/26] feat: add a function for syncing snapshot with the provided roles --- taf/api/metadata.py | 7 ++++++- taf/tuf/repository.py | 11 ++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index 127fb5c84..a5e51bd7e 100644 --- a/taf/api/metadata.py +++ b/taf/api/metadata.py @@ -179,6 +179,7 @@ def update_metadata_expiration_date( def update_snapshot_and_timestamp( path: str, keystore: Optional[str] = None, + roles_to_sync: Optional[List[str]] = None, scheme: Optional[str] = DEFAULT_RSA_SIGNATURE_SCHEME, commit: Optional[bool] = True, commit_msg: Optional[str] = None, @@ -218,4 +219,8 @@ def update_snapshot_and_timestamp( commit_msg=commit_msg, push=push, ): - auth_repo.update_snapshot_and_timestamp() + if roles_to_sync: + auth_repo.sync_snapshot_with_roles(roles_to_sync) + auth_repo.do_timestamp(force=True) + else: + auth_repo.update_snapshot_and_timestamp() diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index e82cd805d..eff154bd8 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -883,7 +883,6 @@ def get_all_target_files_state(self) -> Tuple: """ added_target_files: Dict = {} removed_target_files: Dict = {} - # current fs state fs_target_files = self.all_target_files() # current signed state @@ -1590,6 +1589,16 @@ def sort_roles_targets_for_filenames(self): roles_targets.setdefault(role, []).append(target_file) return roles_targets + def sync_snapshot_with_roles(self, roles: List[str]) -> None: + """ + Add versions of newly created target roles to the 
snapshot. + Also update the versions of their parent roles, which are modified + when a new delegated role is added. + """ + with self.edit(Snapshot.type) as sn: + for role in roles: + sn.meta[f"{role}.json"].version = sn.meta[f"{role}.json"].version + 1 + def update_target_role(self, role: str, target_paths: Dict): """ Update the specified target role by adding or removing From 60e2c70a202653ca4382c3fd96474f403f2be4ee Mon Sep 17 00:00:00 2001 From: n-dusan Date: Mon, 30 Dec 2024 16:12:12 +0000 Subject: [PATCH 23/26] fix: set update_expiration_date in update snapshot and timestamp --- taf/api/metadata.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/taf/api/metadata.py b/taf/api/metadata.py index a5e51bd7e..50de25602 100644 --- a/taf/api/metadata.py +++ b/taf/api/metadata.py @@ -185,6 +185,7 @@ def update_snapshot_and_timestamp( commit_msg: Optional[str] = None, prompt_for_keys: Optional[bool] = False, push: Optional[bool] = True, + update_expiration_dates: Optional[bool] = True, ) -> None: """ Update expiration snapshot and timestamp @@ -197,6 +198,7 @@ def update_snapshot_and_timestamp( commit_msg (optional): Custom commit messages. prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. 
push (optional): Flag specifying whether to push to remote + update_expiration_dates (optional): Flag specifying whether to update expiration dates Side Effects: Updates metadata files, saves changes to disk and commits changes @@ -219,6 +221,11 @@ def update_snapshot_and_timestamp( commit_msg=commit_msg, push=push, ): + if update_expiration_dates: + auth_repo.add_to_open_metadata([Snapshot.type, Timestamp.type]) + for role in [Snapshot.type, Timestamp.type]: + auth_repo.set_metadata_expiration_date(role) + auth_repo.clear_open_metadata() if roles_to_sync: auth_repo.sync_snapshot_with_roles(roles_to_sync) auth_repo.do_timestamp(force=True) From 87394b1cecb6d0664cf91ea9d7c1f2d1013e85db Mon Sep 17 00:00:00 2001 From: n-dusan Date: Fri, 3 Jan 2025 16:00:00 +0100 Subject: [PATCH 24/26] chore: fix typo --- taf/tuf/repository.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index eff154bd8..ec6d76e75 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -1342,12 +1342,12 @@ def modify_targets( target_file = self._create_target_object(target_path, path, custom) target_files.append(target_file) - targets_role = self._modify_tarets_role( + targets_role = self._modify_targets_role( target_files, removed_paths, targets_role ) return targets_role - def _modify_tarets_role( + def _modify_targets_role( self, added_target_files: List[TargetFile], removed_paths: List[str], @@ -1621,7 +1621,7 @@ def update_target_role(self, role: str, target_paths: Dict): ) target_files.append(target_file) - self._modify_tarets_role(target_files, removed_paths, role) + self._modify_targets_role(target_files, removed_paths, role) def update_snapshot_and_timestamp(self, force: Optional[bool] = True): """ From 1a38bb308d687353c304557d77d4e20dec0ce30e Mon Sep 17 00:00:00 2001 From: n-dusan Date: Fri, 3 Jan 2025 18:51:47 +0100 Subject: [PATCH 25/26] fix: convert CRLF to LF before creating target object This is because 
file hashes get calculated differently on Windows vs Linux if line endings are first saved as LF then get converted to CRLF on Windows. To resolve, normalize all line endings to LF. --- taf/tuf/repository.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index ec6d76e75..9086792a1 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -30,7 +30,12 @@ yk = YubikeyMissingLibrary() # type: ignore from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME -from taf.utils import default_backend, get_file_details, on_rm_error +from taf.utils import ( + default_backend, + get_file_details, + on_rm_error, + normalize_file_line_endings, +) from tuf.api.metadata import ( Metadata, MetaFile, @@ -711,8 +716,12 @@ def _create_target_object( self, filesystem_path: str, target_path: str, custom: Optional[Dict] ) -> TargetFile: """ - Creates a TUF target object, later used to update targets metadata + Creates a TUF target object, later used to update targets metadata. + It's first necessary to normalize file line endings (convert all line endings to unix style endings) + before adding a target objects due to hashes getting calculated differently when using CRLF vs LF line endings. + So we instead convert all to unix style endings. 
""" + normalize_file_line_endings(filesystem_path) data = Path(filesystem_path).read_text().encode() target_file = TargetFile.from_data( target_file_path=target_path, From 42771cc3c91fad2f7db8b8e1574d7a715ccdf3ee Mon Sep 17 00:00:00 2001 From: Renata Date: Sat, 4 Jan 2025 18:13:51 -0500 Subject: [PATCH 26/26] feat: add an option to update certain metadata files when signing target files, even if no targets were modified --- taf/api/targets.py | 17 +++++++++++++++-- taf/tuf/repository.py | 32 +++++++++++++++++++------------- 2 files changed, 34 insertions(+), 15 deletions(-) diff --git a/taf/api/targets.py b/taf/api/targets.py index de65b6d94..7a30051cd 100644 --- a/taf/api/targets.py +++ b/taf/api/targets.py @@ -329,6 +329,7 @@ def register_target_files( no_commit_warning: Optional[bool] = True, reset_updated_targets_on_error: Optional[bool] = False, commit_msg: Optional[str] = None, + force_update_of_roles: Optional[str] = None, ): """ Register all files found in the target directory as targets - update the targets @@ -338,12 +339,14 @@ def register_target_files( path: Authentication repository's path. keystore: Location of the keystore files. roles_key_infos: A dictionary whose keys are role names, while values contain information about the keys. - scheme (optional): Signing scheme. Set to rsa-pkcs1v15-sha256 by default. + scheme (optional): Signing scheme. Set to rsa-pkcs1v15-sha256 by default. auth_repo (optional): If auth repository is already initialized, it can be passed and used. write (optional): Write metadata updates to disk if set to True commit (optional): Indicates if the changes should be committed and pushed automatically. prompt_for_keys (optional): Whether to ask the user to enter their key if it is not located inside the keystore directory. 
push (optional): Flag specifying whether to push to remote + force_update_of_roles (optional): A list of roles whose version should be updated, even + if no other changes are made Side Effects: Updates metadata files, writes changes to disk and optionally commits changes. @@ -376,9 +379,14 @@ def register_target_files( roles_key_infos, keystore, enter_info=False ) + roles_to_load = list(roles_and_targets.keys()) + if force_update_of_roles: + for role in force_update_of_roles: + if role not in roles_to_load: + roles_to_load.append(role) with manage_repo_and_signers( auth_repo, - list(roles_and_targets.keys()), + roles_to_load, keystore, scheme, prompt_for_keys, @@ -394,6 +402,11 @@ ): for role, targets in roles_and_targets.items(): auth_repo.update_target_role(role, targets) + if force_update_of_roles: + for role in force_update_of_roles: + if role not in roles_and_targets: + auth_repo.update_target_role(role, None, True) + if update_snapshot_and_timestamp: auth_repo.update_snapshot_and_timestamp() diff --git a/taf/tuf/repository.py b/taf/tuf/repository.py index 9086792a1..e8de21535 100644 --- a/taf/tuf/repository.py +++ b/taf/tuf/repository.py @@ -1608,29 +1608,35 @@ def sync_snapshot_with_roles(self, roles: List[str]) -> None: for role in roles: sn.meta[f"{role}.json"].version = sn.meta[f"{role}.json"].version + 1 - def update_target_role(self, role: str, target_paths: Dict): + def update_target_role(self, role: str, target_paths: Dict, force=False): """ Update the specified target role by adding or removing target files and target objects for the specified target paths + If force is True, update the metadata files even if no target + paths are specified """ if not self.check_if_role_exists(role): raise TAFError(f"Role {role} does not exist") self.verify_signers_loaded([role]) removed_paths = [] target_files = [] - for target_path in target_paths: - full_path = self.path / TARGETS_DIRECTORY_NAME / target_path - # file removed, removed 
from te role + if not full_path.is_file(): - removed_paths.append(target_path) - else: - custom_data = self.get_target_file_custom_data(target_path) - target_file = self._create_target_object( - full_path, target_path, custom_data - ) - target_files.append(target_file) + if target_paths: + for target_path in target_paths: + full_path = self.path / TARGETS_DIRECTORY_NAME / target_path + # file removed, removed from the role + if not full_path.is_file(): + removed_paths.append(target_path) + else: + custom_data = self.get_target_file_custom_data(target_path) + target_file = self._create_target_object( + full_path, target_path, custom_data + ) + target_files.append(target_file) - self._modify_targets_role(target_files, removed_paths, role) + self._modify_targets_role(target_files, removed_paths, role) + elif force: + with self.edit(role) as _: + pass def update_snapshot_and_timestamp(self, force: Optional[bool] = True): """