From 73b9c6157ddf8b0ba7a5e7099a4a88b6295dcd0b Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Mon, 6 Nov 2017 08:49:45 +0100 Subject: [PATCH 01/79] Add Pylint report format --- teamscale_client/constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/teamscale_client/constants.py b/teamscale_client/constants.py index a90b4cf..cbe1208 100644 --- a/teamscale_client/constants.py +++ b/teamscale_client/constants.py @@ -107,6 +107,8 @@ class ReportFormats: SPCOP = "SPCOP" CS_COMPILER_WARNING = "CS_COMPILER_WARNING" + + PYLINT = "PYLINT" class UnitTestReportFormats: """Reports for unit test results that Teamscale understands.""" From 1ac9cdacc67cb654d062dba11fdb5867596e9705 Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Tue, 21 Nov 2017 16:29:57 +0100 Subject: [PATCH 02/79] Update to 3.8 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d1f1e99..3fba9a8 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="teamscale-client", - version="3.5.0", + version="3.8.0", author="Thomas Kinnen - CQSE GmbH", author_email="kinnen@cqse.eu", description=("A simple service client to interact with Teamscale's REST API."), From 707b2f978088a967547cb1db804bb657a57b35bf Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Tue, 21 Nov 2017 16:48:03 +0100 Subject: [PATCH 03/79] Attempt to fix travis-sphinx build --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 922eb24..b2a2263 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,7 @@ install: script: - python setup.py test - - if [[ $TRAVIS_PYTHON_VERSION == 2* ]]; then travis-sphinx --source=docs build; fi + - if [[ $TRAVIS_PYTHON_VERSION == 2* ]]; then travis-sphinx build --source=docs; fi after_success: - if [[ $TRAVIS_PYTHON_VERSION == 2* ]]; then travis-sphinx deploy; fi From f0ba73a002a59bc998450a9611e970e8f9e1c1b8 Mon Sep 17 00:00:00 2001 From: Fabian Streitel Date: Fri, 24 Nov 2017 09:59:39 +0100 Subject: [PATCH 04/79] enable new movetolastcommit parameter for report uploads --- teamscale_client/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 285c680..4f1aa58 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -259,7 +259,8 @@ def upload_report(self, report_files, report_format, timestamp, message, partiti "message": message, "partition": partition, "format": report_format, - "adjusttimestamp": "true" + "adjusttimestamp": "true", + "movetolastcommit": "true" } multiple_files = [('report', open(filename, 'rb')) for filename in report_files] response = requests.post(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, From 24d6c6cc58f6b9c5b4f3f51ae6ccb12d63f31b34 Mon Sep 17 00:00:00 2001 From: Alex von Rhein Date: Mon, 29 Jan 2018 08:12:16 +0100 Subject: [PATCH 05/79] added path suffix to connector config --- teamscale_client/data.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/teamscale_client/data.py b/teamscale_client/data.py index 6c7b985..04e1285 100644 --- a/teamscale_client/data.py +++ b/teamscale_client/data.py @@ -286,6 +286,7 @@ class SourceCodeConnectorConfiguration(ConnectorConfiguration): Empty by default. branch_transformation (Optional[str]): Regex transformations that are applied to the branch names of the repository. Empty by default. + path_suffix (Optional[str]): The suffix to append to the base URL of the repository. 
Empty by default. """ def __init__(self, connector_type, included_file_names, excluded_file_names="", repository_identifier="repository1", @@ -293,7 +294,7 @@ def __init__(self, connector_type, included_file_names, excluded_file_names="", content_exclude="", polling_interval=60, prepend_repository_identifier=False, end_revision="", text_filter="", source_library_connector=False, run_to_exhaustion=False, delta_size=500, path_prefix_transformation="", path_transformation="", encoding="", author_transformation="", - branch_transformation=""): + branch_transformation="", path_suffix=""): super(SourceCodeConnectorConfiguration, self).__init__(connector_type) self.options = { "Included file names": included_file_names, @@ -316,6 +317,7 @@ def __init__(self, connector_type, included_file_names, excluded_file_names="", "Encoding": encoding, "Author transformation": author_transformation, "Branch transformation": branch_transformation, + "Path suffix": path_suffix, } @@ -411,3 +413,4 @@ def __init__(self, account, enable_externals=False, externals_includes="", exter self.options["Enable Externals"] = enable_externals self.options["Externals Includes"] = externals_includes self.options["Externals Excludes"] = externals_excludes + From 666dc40f32db37549827a5985e922fc283b20b8c Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Mon, 29 Jan 2018 11:01:42 +0100 Subject: [PATCH 06/79] Fix whitespaces --- teamscale_client/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/teamscale_client/data.py b/teamscale_client/data.py index 04e1285..0ec7958 100644 --- a/teamscale_client/data.py +++ b/teamscale_client/data.py @@ -286,7 +286,7 @@ class SourceCodeConnectorConfiguration(ConnectorConfiguration): Empty by default. branch_transformation (Optional[str]): Regex transformations that are applied to the branch names of the repository. Empty by default. - path_suffix (Optional[str]): The suffix to append to the base URL of the repository. Empty by default. + path_suffix (Optional[str]): The suffix to append to the base URL of the repository. Empty by default. 
""" def __init__(self, connector_type, included_file_names, excluded_file_names="", repository_identifier="repository1", @@ -317,7 +317,7 @@ def __init__(self, connector_type, included_file_names, excluded_file_names="", "Encoding": encoding, "Author transformation": author_transformation, "Branch transformation": branch_transformation, - "Path suffix": path_suffix, + "Path suffix": path_suffix, } From d2a427889a8e7d6357535851caafcf7d54e4575e Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Mon, 29 Jan 2018 11:36:26 +0100 Subject: [PATCH 07/79] Prepare for 3.9.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 3fba9a8..ecfc846 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="teamscale-client", - version="3.8.0", + version="3.9.0", author="Thomas Kinnen - CQSE GmbH", author_email="kinnen@cqse.eu", description=("A simple service client to interact with Teamscale's REST API."), From 60f54ccdc43a37517c30cd079624cd04f2d0405d Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 31 Jan 2018 17:12:31 +0100 Subject: [PATCH 08/79] Added a migration tool (works for tasks and blacklist) --- .gitignore | 3 + teamscale_client/client.py | 3 +- tools/migration.py | 248 +++++++++++++++++++++++++++++++++++++ 3 files changed, 252 insertions(+), 2 deletions(-) create mode 100644 tools/migration.py diff --git a/.gitignore b/.gitignore index ba74660..997c66f 100644 --- a/.gitignore +++ b/.gitignore @@ -55,3 +55,6 @@ docs/_build/ # PyBuilder target/ + +# PyChoram +.idea/ diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 4f1aa58..fc886b7 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -54,7 +54,7 @@ def get(self, url, parameters=None): """Sends a GET request to the given service url. Args: - url (str): The URL for which to execute a PUT request + url (str): The URL for which to execute a GET request parameters (dict): parameters to attach to the url Returns: @@ -218,7 +218,6 @@ def add_metric_descriptions(self, metric_descriptions): service_url = self.get_global_service_url("external-metric") return self.put(service_url, data=to_json(metric_descriptions)) - def upload_coverage_data(self, coverage_files, coverage_format, timestamp, message, partition): """Upload coverage reports to Teamscale. It is expected that the given coverage report files can be read from the filesystem. 
diff --git a/tools/migration.py b/tools/migration.py new file mode 100644 index 0000000..1d5e5ef --- /dev/null +++ b/tools/migration.py @@ -0,0 +1,248 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import json +from teamscale_client import TeamscaleClient +from abc import ABC, abstractmethod + +OLD_TEAMSCALE = { + "url": "http://localhost:8080", + "project": "old", + "user": "user", + "token": "tokentoken" +} + +NEW_TEAMSCALE = { + "url": "http://localhost:8080", + "project": "new", + "user": "user", + "token": "tokentoken" +} + + +def main(): + client_new = TeamscaleClient(NEW_TEAMSCALE["url"], + NEW_TEAMSCALE["user"], + NEW_TEAMSCALE["token"], + NEW_TEAMSCALE["project"]) + + client_old = TeamscaleClient(OLD_TEAMSCALE["url"], + OLD_TEAMSCALE["user"], + OLD_TEAMSCALE["token"], + OLD_TEAMSCALE["project"]) + + BlacklistMigrator(client_old, client_new).migrate() + TaskMigrator(client_old, client_new).migrate() + + +class MigratorBase(ABC): + """ Base class for migrating data from one instance to another via REST calls. """ + + def __init__(self, old, new): + self.old = old + self.new = new + self.migrated = 0 + + @staticmethod + def get(client, service, path_param="", parameters=None): + """ Performs a GET call from the client to the service with the given parameters + and returns the response as a JSON Object. + The path parameter are additions to the path, e.g /service/id. + """ + response = client.get(client.get_project_service_url(service) + path_param, parameters) + return json.loads(response.text) + + def get_from_old(self, service, path_param="", parameters=None): + """ Performs a GET call with the given information on the instance from + which the data should be migrated and returns the response as a JSON Object. + """ + return self.get(self.old, service, path_param, parameters) + + def get_from_new(self, service, path_param="", parameters=None): + """ Performs a GET call with the given information on the instance + to which the data should be migrated and returns the response as a JSON Object. + """ + return self.get(self.new, service, path_param, parameters) + + def put(self, service, data, path_param="", parameters=None): + """ Performs a PUT call from the client to the service with the given parameters and data. + The path parameter are additions to the path, e.g /service/id. + """ + self.new.put(self.new.get_project_service_url(service) + path_param, + parameters=parameters, + json=data) + + def get_matching_finding_id(self, finding_id): + """ Tries to find a matching finding in the new instance for + the given findings id of the old instance. + If no match could be found `None` is returned. + """ + finding = self.get_from_old("findings-by-id", path_param=finding_id) + new_findings = self.get_from_new("findings", + path_param=finding["location"]["uniformPath"]) + for new_finding in new_findings: + if self.match_finding(new_finding, finding): + return new_finding["id"] + return None + + @staticmethod + def match_finding(finding1, finding2): + """ Checks if the given two findings are the same. 
""" + if finding1["message"] != finding2["message"]: + return False + + # some findings don't have a start line + has_line1 = "rawStartLine" in finding1["location"] + has_line2 = "rawStartLine" in finding2["location"] + if has_line1 != has_line2: + return False + if not has_line1: + return True + return finding1["location"]["rawStartLine"] == finding2["location"]["rawStartLine"] + + @abstractmethod + def migrate(self): + pass + + +class BlacklistMigrator(MigratorBase): + """ Class for migrating blacklists between two instances. + If some blacklisted finding cannot be found in the new instance, they will not + bet migrated. + """ + + def migrate(self): + """ Migrates the blacklists. """ + migrate_blacklist = self.get_blacklist_infos() + for blacklisted in migrate_blacklist: + new_id = self.get_matching_finding_id(blacklisted["findingId"]) + if not new_id: + print("could not find finding %s in new Teamscale" % blacklisted["findingId"]) + else: + print("mapped old finding %s to new finding %s" % (blacklisted["findingId"], new_id)) + self.blacklist(blacklisted, new_id) + + if len(migrate_blacklist) == 0: + print("No new blacklisted findings to migrate.") + else: + print("migrated %d/%d blacklisted findings" % (self.migrated, len(migrate_blacklist))) + + def get_blacklist_infos(self): + """ Returns all blacklist info objects from the old instance. """ + # Remove findings which have already been migrated and have the same id + blacklisted_ids = set(self.get_from_old("finding-blacklist")) - set(self.get_from_new("finding-blacklist")) + + infos = [] + for finding_id in blacklisted_ids: + info = self.get_from_old("finding-blacklist", path_param=finding_id) + if not info: + print("Blacklisted finding %s no longer exists at HEAD, not migrating" % finding_id) + else: + infos.append(info) + return infos + + def blacklist(self, blacklist_info, new_id): + """ Blacklists a finding with the given id on the new instance. """ + self.migrated += 1 + blacklist_info["findingId"] = new_id + self.put("finding-blacklist", blacklist_info, path_param=new_id) + + +class TaskMigrator(MigratorBase): + """ Class for migrating tasks between two instances. + Tasks will only be migrated if all connected findings are on the new instance as well. + """ + + def migrate(self): + """ Migrates the tasks. """ + parameters = {"with-count": True, "max": 0} + new_tasks_count = self.get_from_new("tasks", parameters=parameters)["totalTaskCount"] + 1 + + old_tasks = self.get_from_old("tasks", parameters={"details": True}) + total = len(old_tasks) + for old_task in old_tasks: + if self.task_exists(old_task): + total -= 1 + else: + if self.adjust_task(old_task, new_tasks_count): + print("migrating task %s" % old_task["id"]) + new_tasks_count += 1 + self.add_task(old_task) + if total == 0: + print("No new tasks to migrate.") + else: + print("migrated %d/%d tasks" % (self.migrated, total)) + + def adjust_task(self, task, new_id): + """ Before adding the task to the new instance, it needs to get a new id, in order to prevent + that potential existing tasks are overwritten. The ids of any connected findings need + to be changed to the corresponding findings on the new instance, as well. + If any finding cannot be matched on the new instance `False` will be returned, `True` otherwise.""" + for finding in task["findings"]: + matching_finding_id = self.get_matching_finding_id(finding["findingId"]) + if not matching_finding_id: + print("The finding %s for the task %s does not exists on the new instance." 
% ( + finding["findingId"], task["id"])) + return False + finding["findingId"] = matching_finding_id + task["id"] = new_id + + return True + + def add_task(self, task): + """ Adds a task to the new instance """ + self.migrated += 1 + self.put("tasks", path_param=str(task["id"]), data=task) + + def task_exists(self, old_task): + """ Checks if the given tasks already exists on the new instance. """ + new_tasks = self.get_from_new("tasks", parameters={ + "author": old_task["author"], + "assignee": old_task["assignee"], + "tags": old_task["tags"], + "details": True + }) + + for new_task in new_tasks: + if self.superficial_match(new_task, old_task) and self.task_findings_match(new_task, old_task): + return True + return False + + def task_findings_match(self, new, old): + """ Checks if the findings of the given two tasks are the same. """ + new_findings = self.get_task_findings(self.new, new) + old_findings = self.get_task_findings(self.old, old) + + for old_finding in old_findings: + match_found = False + for new_finding in new_findings: + if self.match_finding(old_finding, new_finding): + match_found = True + break + if not match_found: + return False + return True + + def superficial_match(self, task1, task2): + """ A quick check if two tasks are roughly the same. It checks the contents of some fields and + the number of findings. + """ + return self.str_task(task1) == self.str_task(task2) and len(task1["findings"]) == len(task2["findings"]) + + @staticmethod + def str_task(task): + """ Creates a simple string out of task with some of its field values. """ + return str([task[x] for x in ["subject", "description"]]) + + def get_task_findings(self, client, task): + """ Returns the findigs objects for a task (if it has any) """ + findings = [] + for entry in task["findings"]: + findings.append(self.get(client, "findings-by-id", path_param=entry["findingId"])) + return findings + + +if __name__ == "__main__": + main() From 254785dc8c6ebe096bae8d7fa4440ce343be37db Mon Sep 17 00:00:00 2001 From: pawelka Date: Thu, 1 Feb 2018 09:15:12 +0100 Subject: [PATCH 09/79] Fixed some typos --- teamscale_client/client.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index fc886b7..bb2b997 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -85,7 +85,7 @@ def put(self, url, json=None, parameters=None, data=None): Raises: ServiceError: If anything goes wrong """ - headers = {'Accept': 'application/json','Content-Type': 'application/json'} + headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} response = requests.put(url, params=parameters, json=json, data=data, headers=headers, auth=self.auth_header, verify=self.sslverify, timeout=self.timeout) @@ -272,7 +272,7 @@ def upload_architectures(self, architectures, timestamp, message): """Upload architectures to Teamscale. It is expected that the given architectures can be be read from the filesystem. Args: - architectures (dict): mappping of teamscale paths to architecture files that should be uploaded. Files must be readable. + architectures (dict): mapping of teamscale paths to architecture files that should be uploaded. Files must be readable. timestamp (datetime.datetime): timestamp for which to upload the data message (str): The message to use for the generated upload commit @@ -393,7 +393,7 @@ def create_project(self, project_configuration): Raises: ServiceError: If anything goes wrong. 
""" - return self._add_project(project_configuration, perfrom_update_call=False) + return self._add_project(project_configuration, perform_update_call=False) def update_project(self, project_configuration): """Updates an existing project in Teamscale with the given configuration. The id of the existing project is @@ -407,20 +407,20 @@ def update_project(self, project_configuration): Raises: ServiceError: If anything goes wrong. """ - return self._add_project(project_configuration, perfrom_update_call=True) + return self._add_project(project_configuration, perform_update_call=True) - def _add_project(self, project_configuration, perfrom_update_call): - """Adds a project to Teamscale. The parameter `perfrom_update_call` specifies, whether an update call should be + def _add_project(self, project_configuration, perform_update_call): + """Adds a project to Teamscale. The parameter `perform_update_call` specifies, whether an update call should be made: - - If `perfrom_update_call` is set to `True`, re-adding a project with an existing id will update the original + - If `perform_update_call` is set to `True`, re-adding a project with an existing id will update the original project. - - If `perfrom_update_call` is set to `False`, re-adding a project with an existing id will result in an error. - - Further, if `perfrom_update_call` is set to `True`, but no project with the specified id exists, an error is + - If `perform_update_call` is set to `False`, re-adding a project with an existing id will result in an error. + - Further, if `perform_update_call` is set to `True`, but no project with the specified id exists, an error is thrown as well. Args: project_configuration (data.ProjectConfiguration): The project that is to be created (or updated). - perfrom_update_call (bool): Whether to perform an update call. + perform_update_call (bool): Whether to perform an update call. Returns: requests.Response: object generated by the upload request. @@ -429,7 +429,7 @@ def _add_project(self, project_configuration, perfrom_update_call): """ service_url = self.get_global_service_url("create-project") parameters = { - "only-config-update": perfrom_update_call + "only-config-update": perform_update_call } response = self.put(service_url, parameters=parameters, data=to_json(project_configuration)) From 99fed9527f768d24fd3a11dd3a7c72fb0b7b3dbb Mon Sep 17 00:00:00 2001 From: Fabian Streitel Date: Mon, 5 Feb 2018 10:43:49 +0100 Subject: [PATCH 10/79] fix typo --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 997c66f..e12944a 100644 --- a/.gitignore +++ b/.gitignore @@ -56,5 +56,5 @@ docs/_build/ # PyBuilder target/ -# PyChoram +# PyCharm .idea/ From 203e4a90a73951eb3d17cf1d8afbb06aa1e3d3b7 Mon Sep 17 00:00:00 2001 From: Fabian Streitel Date: Mon, 5 Feb 2018 10:58:46 +0100 Subject: [PATCH 11/79] fix typo --- tools/migration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration.py b/tools/migration.py index 1d5e5ef..3d0e1d9 100644 --- a/tools/migration.py +++ b/tools/migration.py @@ -110,7 +110,7 @@ def migrate(self): class BlacklistMigrator(MigratorBase): """ Class for migrating blacklists between two instances. If some blacklisted finding cannot be found in the new instance, they will not - bet migrated. + be migrated. 
""" def migrate(self): From e47b73e86a676897f3129b652779070e5d8ee167 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 6 Feb 2018 15:01:08 +0100 Subject: [PATCH 12/79] Reworked the migrating tools --- teamscale_client/client.py | 28 ++-- teamscale_client/data.py | 21 ++- tools/__init__.py | 0 tools/blacklist_migrator.py | 58 +++++++++ tools/config.template | 14 ++ tools/migration.py | 248 ------------------------------------ tools/migrator_base.py | 171 +++++++++++++++++++++++++ tools/task_migrator.py | 116 +++++++++++++++++ 8 files changed, 387 insertions(+), 269 deletions(-) create mode 100644 tools/__init__.py create mode 100755 tools/blacklist_migrator.py create mode 100644 tools/config.template delete mode 100644 tools/migration.py create mode 100644 tools/migrator_base.py create mode 100755 tools/task_migrator.py diff --git a/teamscale_client/client.py b/teamscale_client/client.py index bb2b997..f2ca2c3 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -22,8 +22,10 @@ class TeamscaleClient: username (str): The username to use for authentication access_token (str): The IDE access token to use for authentication project (str): The id of the project on which to work - sslverify: See requests' verify parameter in http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification - timeout (float): TTFB timeout in seconds, see http://docs.python-requests.org/en/master/user/quickstart/#timeouts + sslverify: See requests' verify parameter in + http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification + timeout (float): TTFB timeout in seconds, + see http://docs.python-requests.org/en/master/user/quickstart/#timeouts branch: The branch name for which to upload/retrieve data """ @@ -45,10 +47,10 @@ def check_api_version(self): """ url = self.get_global_service_url('service-api-info') response = self.get(url) - apiVersion = response.json()['apiVersion'] - if apiVersion < 3: + api_version = response.json()['apiVersion'] + if api_version < 3: raise ServiceError("Server api version " + str( - apiVersion) + " too low and not compatible. This client requires Teamscale 3.2 or newer."); + api_version) + " too low and not compatible. This client requires Teamscale 3.2 or newer.") def get(self, url, parameters=None): """Sends a GET request to the given service url. 
@@ -67,7 +69,7 @@ def get(self, url, parameters=None): response = requests.get(url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=url, r=response)) + raise ServiceError("GET", url, response) return response def put(self, url, json=None, parameters=None, data=None): @@ -90,7 +92,7 @@ def put(self, url, json=None, parameters=None, data=None): headers=headers, auth=self.auth_header, verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) + raise ServiceError("PUT", url, response) return response def delete(self, url, parameters=None): @@ -109,7 +111,7 @@ def delete(self, url, parameters=None): response = requests.delete(url, params=parameters, auth=self.auth_header, verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) + raise ServiceError("PUT", url, response) return response def add_findings_group(self, name, mapping_pattern): @@ -265,7 +267,7 @@ def upload_report(self, report_files, report_format, timestamp, message, partiti response = requests.post(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, files=multiple_files, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + raise ServiceError("POST", service_url, response) return response def upload_architectures(self, architectures, timestamp, message): @@ -291,7 +293,7 @@ def upload_architectures(self, architectures, timestamp, message): response = requests.post(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, files=architecture_files, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + raise ServiceError("GET", service_url, response) return response def upload_non_code_metrics(self, metrics, timestamp, message, partition): @@ -328,7 +330,7 @@ def get_baselines(self): response = requests.get(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + raise ServiceError("GET", service_url, response) return [Baseline(x['name'], x['description'], timestamp=x['timestamp']) for x in response.json()] def delete_baseline(self, baseline_name): @@ -435,9 +437,7 @@ def _add_project(self, project_configuration, perform_update_call): response_message = TeamscaleClient._get_response_message(response) if response_message != 'success': - raise ServiceError( - "ERROR: GET {url}: {status_code}:{message}".format(url=service_url, status_code=response.status_code, - message=response_message)) + raise ServiceError("GET", service_url, response) return response @staticmethod diff --git a/teamscale_client/data.py b/teamscale_client/data.py index 0ec7958..804c482 100644 --- a/teamscale_client/data.py +++ b/teamscale_client/data.py @@ -5,8 +5,7 @@ import datetime import time -from teamscale_client.constants import Assessment, MetricAggregation, MetricValueType, MetricProperties, \ - AssessmentMetricColors, 
ConnectorType +from teamscale_client.constants import Assessment, MetricAggregation, MetricValueType, MetricProperties, ConnectorType from teamscale_client.utils import auto_str @@ -17,7 +16,7 @@ class Finding(object): Args: finding_type_id (str): The type id that this finding belongs to. message (str): The main finding message - assesssment (constants.Assessment): The assessment this finding should have. Default is `YELLOW`. + assessment (constants.Assessment): The assessment this finding should have. Default is `YELLOW`. This value is only important if in Teamscale the finding enablement is set to auto, otherwise the setting from Teamscale will be used. start_offset (int): Offset from the beginning of the file, where the finding area starts @@ -66,8 +65,10 @@ def __init__(self, findings, path, content=None): Args: typeid (str): The id used to reference the finding type. - description (str): The text to display that explains what this finding type is about (and ideally how to fix it). This text will be the same for each concrete instance of the finding. - enablement (constants.Enablement): Describes the default enablement setting for this finding type, used when it is added to the analysis profile. + description (str): The text to display that explains what this finding type is about + (and ideally how to fix it) This text will be the same for each concrete instance of the finding. + enablement (constants.Enablement): Describes the default enablement setting for this finding type, used when it + is added to the analysis profile. """ @@ -181,7 +182,14 @@ def _set_date(self, date_object): class ServiceError(Exception): """Teamscale service returned an error.""" - pass + + def __init__(self, method, url, response): + self.message = "ERROR: {0} {1}: {r.status_code}:{r.text}".format(method, url, r=response) + self.response = response + + # This prevents the compressing of a response into a single line, making it unreadable (no repr()!) + def __str__(self): + return self.message @auto_str @@ -413,4 +421,3 @@ def __init__(self, account, enable_externals=False, externals_includes="", exter self.options["Enable Externals"] = enable_externals self.options["Externals Includes"] = externals_includes self.options["Externals Excludes"] = externals_excludes - diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/blacklist_migrator.py b/tools/blacklist_migrator.py new file mode 100755 index 0000000..1465a27 --- /dev/null +++ b/tools/blacklist_migrator.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +from migrator_base import MigratorBase + + +def main(): + BlacklistMigrator().migrate() + + +class BlacklistMigrator(MigratorBase): + """ Class for migrating a blacklist between two instances. + If some blacklisted finding cannot be found in the new instance, they will not + be migrated. + """ + def migrate(self): + """ Migrates the blacklist. 
""" + blacklist_infos = self.get_blacklist_infos() + for blacklist_info in blacklist_infos: + old_id = blacklist_info["findingId"] + new_id = self.get_matching_finding_id(old_id) + if new_id is None: + self.logger.warning("Could not match finding %s to new instance" % + self.get_findings_url(self.old, old_id)) + else: + self.logger.info("Migrating blacklisted finding %s" % self.get_findings_url(old_id)) + self.blacklist_finding(blacklist_info, new_id) + + if len(blacklist_infos) == 0: + self.logger.info("No new blacklisted findings to migrate") + else: + self.logger.info("Migrated %d/%d blacklisted findings" % (self.migrated, len(blacklist_infos))) + + def get_blacklist_infos(self): + """ Returns all blacklist info objects from the old instance. """ + # Remove findings which have already been migrated and have the same id + blacklisted_ids = set(self.get_from_old("finding-blacklist")) + if len(blacklisted_ids) == 0: + self.logger.info("Old instance does not have any blacklisted findings") + exit(1) + blacklisted_ids -= set(self.get_from_new("finding-blacklist")) + + infos = [] + for finding_id in blacklisted_ids: + info = self.get_from_old("finding-blacklist", path_suffix=finding_id) + if not info: + self.logger.info("Blacklisted finding %s no longer exists at HEAD, not migrating" % finding_id) + else: + infos.append(info) + return infos + + def blacklist_finding(self, blacklist_info, new_id): + """ Blacklists a finding with the given id on the new instance. """ + self.migrated += 1 + blacklist_info["findingId"] = new_id + self.put_in_new("finding-blacklist", blacklist_info, path_suffix=new_id) + + +if __name__ == "__main__": + main() diff --git a/tools/config.template b/tools/config.template new file mode 100644 index 0000000..bab0729 --- /dev/null +++ b/tools/config.template @@ -0,0 +1,14 @@ +{ + "old_instance" : { + "url": "http://localhost:8080", + "project": "a", + "user": "user", + "token": "tokentoken" + }, + "new_instance" : { + "url": "http://localhost:8080", + "project": "b", + "user": "user", + "token": "tokentoken" + } +} \ No newline at end of file diff --git a/tools/migration.py b/tools/migration.py deleted file mode 100644 index 3d0e1d9..0000000 --- a/tools/migration.py +++ /dev/null @@ -1,248 +0,0 @@ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import json -from teamscale_client import TeamscaleClient -from abc import ABC, abstractmethod - -OLD_TEAMSCALE = { - "url": "http://localhost:8080", - "project": "old", - "user": "user", - "token": "tokentoken" -} - -NEW_TEAMSCALE = { - "url": "http://localhost:8080", - "project": "new", - "user": "user", - "token": "tokentoken" -} - - -def main(): - client_new = TeamscaleClient(NEW_TEAMSCALE["url"], - NEW_TEAMSCALE["user"], - NEW_TEAMSCALE["token"], - NEW_TEAMSCALE["project"]) - - client_old = TeamscaleClient(OLD_TEAMSCALE["url"], - OLD_TEAMSCALE["user"], - OLD_TEAMSCALE["token"], - OLD_TEAMSCALE["project"]) - - BlacklistMigrator(client_old, client_new).migrate() - TaskMigrator(client_old, client_new).migrate() - - -class MigratorBase(ABC): - """ Base class for migrating data from one instance to another via REST calls. """ - - def __init__(self, old, new): - self.old = old - self.new = new - self.migrated = 0 - - @staticmethod - def get(client, service, path_param="", parameters=None): - """ Performs a GET call from the client to the service with the given parameters - and returns the response as a JSON Object. 
- The path parameter are additions to the path, e.g /service/id. - """ - response = client.get(client.get_project_service_url(service) + path_param, parameters) - return json.loads(response.text) - - def get_from_old(self, service, path_param="", parameters=None): - """ Performs a GET call with the given information on the instance from - which the data should be migrated and returns the response as a JSON Object. - """ - return self.get(self.old, service, path_param, parameters) - - def get_from_new(self, service, path_param="", parameters=None): - """ Performs a GET call with the given information on the instance - to which the data should be migrated and returns the response as a JSON Object. - """ - return self.get(self.new, service, path_param, parameters) - - def put(self, service, data, path_param="", parameters=None): - """ Performs a PUT call from the client to the service with the given parameters and data. - The path parameter are additions to the path, e.g /service/id. - """ - self.new.put(self.new.get_project_service_url(service) + path_param, - parameters=parameters, - json=data) - - def get_matching_finding_id(self, finding_id): - """ Tries to find a matching finding in the new instance for - the given findings id of the old instance. - If no match could be found `None` is returned. - """ - finding = self.get_from_old("findings-by-id", path_param=finding_id) - new_findings = self.get_from_new("findings", - path_param=finding["location"]["uniformPath"]) - for new_finding in new_findings: - if self.match_finding(new_finding, finding): - return new_finding["id"] - return None - - @staticmethod - def match_finding(finding1, finding2): - """ Checks if the given two findings are the same. """ - if finding1["message"] != finding2["message"]: - return False - - # some findings don't have a start line - has_line1 = "rawStartLine" in finding1["location"] - has_line2 = "rawStartLine" in finding2["location"] - if has_line1 != has_line2: - return False - if not has_line1: - return True - return finding1["location"]["rawStartLine"] == finding2["location"]["rawStartLine"] - - @abstractmethod - def migrate(self): - pass - - -class BlacklistMigrator(MigratorBase): - """ Class for migrating blacklists between two instances. - If some blacklisted finding cannot be found in the new instance, they will not - be migrated. - """ - - def migrate(self): - """ Migrates the blacklists. """ - migrate_blacklist = self.get_blacklist_infos() - for blacklisted in migrate_blacklist: - new_id = self.get_matching_finding_id(blacklisted["findingId"]) - if not new_id: - print("could not find finding %s in new Teamscale" % blacklisted["findingId"]) - else: - print("mapped old finding %s to new finding %s" % (blacklisted["findingId"], new_id)) - self.blacklist(blacklisted, new_id) - - if len(migrate_blacklist) == 0: - print("No new blacklisted findings to migrate.") - else: - print("migrated %d/%d blacklisted findings" % (self.migrated, len(migrate_blacklist))) - - def get_blacklist_infos(self): - """ Returns all blacklist info objects from the old instance. 
""" - # Remove findings which have already been migrated and have the same id - blacklisted_ids = set(self.get_from_old("finding-blacklist")) - set(self.get_from_new("finding-blacklist")) - - infos = [] - for finding_id in blacklisted_ids: - info = self.get_from_old("finding-blacklist", path_param=finding_id) - if not info: - print("Blacklisted finding %s no longer exists at HEAD, not migrating" % finding_id) - else: - infos.append(info) - return infos - - def blacklist(self, blacklist_info, new_id): - """ Blacklists a finding with the given id on the new instance. """ - self.migrated += 1 - blacklist_info["findingId"] = new_id - self.put("finding-blacklist", blacklist_info, path_param=new_id) - - -class TaskMigrator(MigratorBase): - """ Class for migrating tasks between two instances. - Tasks will only be migrated if all connected findings are on the new instance as well. - """ - - def migrate(self): - """ Migrates the tasks. """ - parameters = {"with-count": True, "max": 0} - new_tasks_count = self.get_from_new("tasks", parameters=parameters)["totalTaskCount"] + 1 - - old_tasks = self.get_from_old("tasks", parameters={"details": True}) - total = len(old_tasks) - for old_task in old_tasks: - if self.task_exists(old_task): - total -= 1 - else: - if self.adjust_task(old_task, new_tasks_count): - print("migrating task %s" % old_task["id"]) - new_tasks_count += 1 - self.add_task(old_task) - if total == 0: - print("No new tasks to migrate.") - else: - print("migrated %d/%d tasks" % (self.migrated, total)) - - def adjust_task(self, task, new_id): - """ Before adding the task to the new instance, it needs to get a new id, in order to prevent - that potential existing tasks are overwritten. The ids of any connected findings need - to be changed to the corresponding findings on the new instance, as well. - If any finding cannot be matched on the new instance `False` will be returned, `True` otherwise.""" - for finding in task["findings"]: - matching_finding_id = self.get_matching_finding_id(finding["findingId"]) - if not matching_finding_id: - print("The finding %s for the task %s does not exists on the new instance." % ( - finding["findingId"], task["id"])) - return False - finding["findingId"] = matching_finding_id - task["id"] = new_id - - return True - - def add_task(self, task): - """ Adds a task to the new instance """ - self.migrated += 1 - self.put("tasks", path_param=str(task["id"]), data=task) - - def task_exists(self, old_task): - """ Checks if the given tasks already exists on the new instance. """ - new_tasks = self.get_from_new("tasks", parameters={ - "author": old_task["author"], - "assignee": old_task["assignee"], - "tags": old_task["tags"], - "details": True - }) - - for new_task in new_tasks: - if self.superficial_match(new_task, old_task) and self.task_findings_match(new_task, old_task): - return True - return False - - def task_findings_match(self, new, old): - """ Checks if the findings of the given two tasks are the same. """ - new_findings = self.get_task_findings(self.new, new) - old_findings = self.get_task_findings(self.old, old) - - for old_finding in old_findings: - match_found = False - for new_finding in new_findings: - if self.match_finding(old_finding, new_finding): - match_found = True - break - if not match_found: - return False - return True - - def superficial_match(self, task1, task2): - """ A quick check if two tasks are roughly the same. It checks the contents of some fields and - the number of findings. 
- """ - return self.str_task(task1) == self.str_task(task2) and len(task1["findings"]) == len(task2["findings"]) - - @staticmethod - def str_task(task): - """ Creates a simple string out of task with some of its field values. """ - return str([task[x] for x in ["subject", "description"]]) - - def get_task_findings(self, client, task): - """ Returns the findigs objects for a task (if it has any) """ - findings = [] - for entry in task["findings"]: - findings.append(self.get(client, "findings-by-id", path_param=entry["findingId"])) - return findings - - -if __name__ == "__main__": - main() diff --git a/tools/migrator_base.py b/tools/migrator_base.py new file mode 100644 index 0000000..459d149 --- /dev/null +++ b/tools/migrator_base.py @@ -0,0 +1,171 @@ +import json +import argparse +import logging +from abc import ABC, abstractmethod +from pathlib import Path +from teamscale_client import TeamscaleClient +from teamscale_client.data import ServiceError +from requests.exceptions import ConnectionError + +# TODO: migrating between different versions (messages might change) + + +def get_arguments(): + """ Parses the arguments for the migration tool. """ + parser = argparse.ArgumentParser(description="test", formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, " + "see config.template.") + parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log. Can be use to " + "dry-run the migration, as it does not change anything.") + return parser.parse_args() + + +class MigratorBase(ABC): + """ Base class for migrating data from one instance to another via REST calls. """ + + def __init__(self): + args = get_arguments() + self.debug = args.debug + self.logger = self.get_logger() + self.old, self.new = self.load_config(args.config) + self.migrated = 0 + self.cache = {} + + if self.debug: + self.logger.debug("Debug Mode ON") + + def load_config(self, config_path): + """ Reads the given config defined by its path and creates the two teamscale clients from it. + One old instance (migrating from) and a new onoe (migrating to). + """ + config_file = Path(config_path) + if config_file.exists(): + try: + data = json.load(config_file.open()) + return self.get_client(data["old_instance"]), self.get_client(data["new_instance"]) + except (json.JSONDecodeError, KeyError) as e: + self.logger.exception("Config file '%s' is malformed" % config_path, exc_info=True) + except ConnectionError as e: + self.logger.exception("Connection to %s could not be established" % e.request.url) + except ServiceError as e: + self.logger.exception("Creating the teamscale clients failed.") + else: + self.logger.exception("Config file '%s' does not exist" % config_path) + exit(1) + + @staticmethod + def get_client(data): + """ Creates a teamscale client from the given data """ + return TeamscaleClient(data["url"], data["user"], data["token"], data["project"]) + + def get_logger(self): + """ Creates a logger """ + logger = logging.getLogger(__name__) + logger.setLevel(logging.INFO) + if self.debug: + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + formatter = logging.Formatter("%(levelname)-8s %(message)s") + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger + + def check_cache(self, request, use_cache): + """ If use_cache is True it checks if the cache already contains the response + for the given request and returns it. 
+ If the cache shouldn't be used or no cache was found None is returned. + """ + request = str(request) + if use_cache and (request in self.cache): + self.logger.debug("Cache hit for %s" % request) + return self.cache[request] + return None + + def cache_request(self, request, response, use_cache): + """ If the cache should be used, the request and its response are cached. """ + if use_cache: + self.cache[str(request)] = response + + def get(self, client, service, path_suffix="", parameters=None, use_cache=True): + """ Performs a GET call from the client to the service with the given parameters + and returns the response as a JSON Object. + Args: + path_suffix(str): Will be added to the end of the project service URL + """ + url = client.get_project_service_url(service) + path_suffix + + response = self.check_cache((url, parameters), use_cache) + if response is None: + try: + self.logger.debug("Service Call: {}".format((url, parameters))) + response = client.get(url, parameters).json() + except ServiceError as e: + self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) + exit(1) + self.cache_request((url, parameters), response, use_cache) + return response + + def get_from_old(self, service, path_suffix="", parameters=None, use_cache=True): + """ Performs a GET call with the given information on the instance from + which the data should be migrated and returns the response as a JSON Object. + Args: + path_suffix(str): Will be added to the end of the project service URL + """ + return self.get(self.old, service, path_suffix, parameters, use_cache) + + def get_from_new(self, service, path_suffix="", parameters=None, use_cache=True): + """ Performs a GET call with the given information on the instance + to which the data should be migrated and returns the response as a JSON Object. + Args: + path_suffix(str): Will be added to the end of the project service URL + """ + return self.get(self.new, service, path_suffix, parameters, use_cache) + + def put_in_new(self, service, data, path_suffix="", parameters=None): + """ Performs a PUT call from the client to the service with the given parameters and data. + The path parameter are additions to the path, e.g /service/id. + Args: + path_suffix(str): Will be added to the end of the project service URL + """ + if not self.debug: + self.new.put(self.new.get_project_service_url(service) + path_suffix, + parameters=parameters, + json=data) + + def get_matching_finding_id(self, finding_id): + """ Tries to find a matching finding in the new instance for + the given findings id of the old instance. + If no match could be found `None` is returned. + """ + finding = self.get_from_old("findings-by-id", path_suffix=finding_id) + new_findings = self.get_from_new("findings", path_suffix=finding["location"]["uniformPath"]) + for new_finding in new_findings: + if self.match_finding(new_finding, finding): + return new_finding["id"] + return None + + def get_findings_url(self, findings_id, client=None): + """ Creates a url link to the finding with the given id on the given Teamscale """ + if client is None: + client = self.old + return "{0.url}/findings.html#details/{0.project}/?id={1}".format(client, findings_id) + + @staticmethod + def match_finding(finding1, finding2): + """ Checks if the given two findings are the same. 
""" + if finding1["message"] != finding2["message"]: + return False + + # some findings don't have a start line + has_line1 = "rawStartLine" in finding1["location"] + has_line2 = "rawStartLine" in finding2["location"] + if has_line1 != has_line2: + return False + if not has_line1: + return True + return finding1["location"]["rawStartLine"] == finding2["location"]["rawStartLine"] + + @abstractmethod + def migrate(self): + """ Migrates the date from the old instance to the new one """ + pass diff --git a/tools/task_migrator.py b/tools/task_migrator.py new file mode 100755 index 0000000..88f8e52 --- /dev/null +++ b/tools/task_migrator.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +from migrator_base import MigratorBase + + +def main(): + """ Migrates the task from the old instance to the new one. + It automatically reads the arguments from the command line. """ + TaskMigrator().migrate() + + +class TaskMigrator(MigratorBase): + """ Class for migrating tasks between two instances. + Tasks will only be migrated if all connected findings are on the new instance as well. + """ + def migrate(self): + """ Migrates the tasks. """ + old_tasks = self.get_filtered_tasks() + for old_task in old_tasks: + old_task_id = old_task["id"] + if self.adjust_task(old_task): + self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) + self.add_task(old_task) + + if len(old_tasks) == 0: + self.logger.info("No new tasks to migrate.") + else: + self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) + + def get_filtered_tasks(self): + """ Returns a list comprising of the tasks of the old instance which are not yet + migrated to the new instance. + """ + old_tasks = self.get_from_old("tasks", parameters={"details": True}) + return list(filter(lambda task: not self.task_exists(task), old_tasks)) + + def adjust_task(self, task): + """ Before adding the task to the new instance the ids of any connected findings need + to be changed to the corresponding findings on the new instance. + If any finding cannot be matched on the new instance `False` will be returned, `True` otherwise. + """ + for finding in task["findings"]: + matching_finding_id = self.get_matching_finding_id(finding["findingId"]) + if matching_finding_id is None: + self.logger.warn("The finding %s for the task %s does not exists on the new instance." % ( + self.get_findings_url(self.old, finding["findingId"]), task["id"])) + return False + finding["findingId"] = matching_finding_id + # If the id is 0, the backend will assign a valid new id + task["id"] = 0 + return True + + def get_tasks_url(self, task_id, client=None): + """ Creates a url of the old instance to the task with the given id. """ + if client is None: + client = self.old + return "{0.url}/tasks.html#details/{0.project}/?id={1}".format(client, task_id) + + def add_task(self, task): + """ Adds a task to the new instance """ + self.migrated += 1 + self.put_in_new("tasks", path_suffix=str(task["id"]), data=task) + + def task_exists(self, old_task): + """ Checks if the given tasks already exists on the new instance. """ + new_tasks = self.get_from_new("tasks", parameters={ + "author": old_task["author"], + "assignee": old_task["assignee"], + "tags": old_task["tags"], + "details": True + }) + + for new_task in new_tasks: + if self.superficial_match(new_task, old_task) and self.task_findings_match(new_task, old_task): + return True + return False + + def task_findings_match(self, new, old): + """ Checks if the findings of the given two tasks are the same. 
+ Returns True if they are, False otherwise. + """ + new_findings = self.get_task_findings(self.new, new) + old_findings = self.get_task_findings(self.old, old) + + for old_finding in old_findings: + if not self.finding_match_in_list(old_finding, new_findings): + return False + return True + + def finding_match_in_list(self, finding, finding_list): + """ Checks whether there is a match for a finding in a list of findings. """ + for new_finding in finding_list: + if self.match_finding(finding, new_finding): + return True + return False + + def superficial_match(self, task1, task2): + """ A quick check if two tasks are roughly the same. It checks the contents of some fields and + the number of findings. + """ + return self.task_to_list(task1) == self.task_to_list(task2) and len(task1["findings"]) == len(task2["findings"]) + + @staticmethod + def task_to_list(task): + """ Creates a simple string out of task with some of its field values. """ + return [task[x] for x in ["subject", "description"]] + + def get_task_findings(self, client, task): + """ Returns the findings objects for a task (if it has any) """ + findings = [] + for entry in task["findings"]: + findings.append(self.get(client, "findings-by-id", path_suffix=entry["findingId"])) + return findings + + +if __name__ == "__main__": + main() From 5f7fae60e4ed972586e0d4ea5f1de1e34dcb4661 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 6 Feb 2018 17:04:45 +0100 Subject: [PATCH 13/79] Added warning and mitigations for migrating between different TS-Versions --- teamscale_client/client.py | 5 +++++ tools/migrator_base.py | 37 ++++++++++++++++++++----------------- 2 files changed, 25 insertions(+), 17 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index f2ca2c3..01f884a 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -384,6 +384,11 @@ def get_projects(self): creation_timestamp=x['creationTimestamp'], alias=x.get('alias'), deleting=x['deleting'], reanalyzing=x['reanalyzing']) for x in response.json()] + def get_version(self): + """ Retrieves the teamscale version """ + response_text = self.get(self.get_global_service_url("health-metrics"), {"metric": "version"}).text + return response_text.split()[1] + def create_project(self, project_configuration): """Creates a project with the specified configuration in Teamscale. diff --git a/tools/migrator_base.py b/tools/migrator_base.py index 459d149..17be948 100644 --- a/tools/migrator_base.py +++ b/tools/migrator_base.py @@ -7,8 +7,6 @@ from teamscale_client.data import ServiceError from requests.exceptions import ConnectionError -# TODO: migrating between different versions (messages might change) - def get_arguments(): """ Parses the arguments for the migration tool. 
""" @@ -28,6 +26,7 @@ def __init__(self): self.debug = args.debug self.logger = self.get_logger() self.old, self.new = self.load_config(args.config) + self.versions_match = self.check_versions() self.migrated = 0 self.cache = {} @@ -43,7 +42,7 @@ def load_config(self, config_path): try: data = json.load(config_file.open()) return self.get_client(data["old_instance"]), self.get_client(data["new_instance"]) - except (json.JSONDecodeError, KeyError) as e: + except (json.JSONDecodeError, KeyError): self.logger.exception("Config file '%s' is malformed" % config_path, exc_info=True) except ConnectionError as e: self.logger.exception("Connection to %s could not be established" % e.request.url) @@ -53,6 +52,18 @@ def load_config(self, config_path): self.logger.exception("Config file '%s' does not exist" % config_path) exit(1) + def check_versions(self): + """ Checks if the versions of both clients match. If not False will be returned + and a warning will be logged. + """ + old_version = self.old.get_version() + new_version = self.new.get_version() + if old_version != new_version: + self.logger.warning("Teamscale versions of the old (%s) and new (%s) instance differ!" % + (old_version, new_version)) + return False + return True + @staticmethod def get_client(data): """ Creates a teamscale client from the given data """ @@ -150,20 +161,12 @@ def get_findings_url(self, findings_id, client=None): client = self.old return "{0.url}/findings.html#details/{0.project}/?id={1}".format(client, findings_id) - @staticmethod - def match_finding(finding1, finding2): - """ Checks if the given two findings are the same. """ - if finding1["message"] != finding2["message"]: - return False - - # some findings don't have a start line - has_line1 = "rawStartLine" in finding1["location"] - has_line2 = "rawStartLine" in finding2["location"] - if has_line1 != has_line2: - return False - if not has_line1: - return True - return finding1["location"]["rawStartLine"] == finding2["location"]["rawStartLine"] + def match_finding(self, finding1, finding2): + """ Checks if the given two findings are the same. This is done by comparing their location and message. 
+ If the version of the two TS instances don't match, only the location is compared """ + location_match = finding1["location"] == finding2["location"] + message_match = finding1["message"] == finding2["message"] + return location_match and (message_match or not self.versions_match) @abstractmethod def migrate(self): From 2f5da64451d36e928e4709efbaadf9394d44846e Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 7 Feb 2018 14:29:13 +0100 Subject: [PATCH 14/79] Added an exemplary test --- tests/task_migrator_test.py | 57 ++++++++++++++++ tests/test_utils.py | 8 +++ tools/migration/__init__.py | 0 tools/{ => migration}/blacklist_migrator.py | 5 +- tools/{ => migration}/config.template | 0 tools/{ => migration}/migrator_base.py | 76 ++++++++++++--------- tools/{ => migration}/task_migrator.py | 11 +-- 7 files changed, 118 insertions(+), 39 deletions(-) create mode 100644 tests/task_migrator_test.py create mode 100644 tests/test_utils.py create mode 100644 tools/migration/__init__.py rename tools/{ => migration}/blacklist_migrator.py (93%) rename tools/{ => migration}/config.template (100%) rename tools/{ => migration}/migrator_base.py (80%) rename tools/{ => migration}/task_migrator.py (93%) diff --git a/tests/task_migrator_test.py b/tests/task_migrator_test.py new file mode 100644 index 0000000..8d6ee4b --- /dev/null +++ b/tests/task_migrator_test.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import +from __future__ import unicode_literals + +import responses +from tools.migration.task_migrator import TaskMigrator +from test_utils import get_global_service_mock +from copy import deepcopy + +URL = "http://localhost:8080" +CONFIG = { + "old_instance": { + "url": URL, + "project": "old", + "user": "admin", + "token": "token" + }, + "new_instance": { + "url": URL, + "project": "new", + "user": "admin", + "token": "token" + } +} + + +class TestTaskMigrator: + @staticmethod + def get_migrator(config): + TestTaskMigrator.create_necessary_client_responses(URL) + return TaskMigrator(config, False) + + @staticmethod + def create_necessary_client_responses(url, version=40000): + """ Creates responses which are necessary to create a client """ + responses.add(responses.GET, get_global_service_mock(url, "service-api-info"), + status=200, content_type="application/json", body='{ "apiVersion": 3 }') + responses.add(responses.GET, get_global_service_mock(url, "health-metrics"), + status=200, content_type="text/plain", body="version %s 0" % version) + + def get_default_migrator(self): + """ Returns the migrator with the default settings """ + return self.get_migrator(CONFIG) + + @responses.activate + def test_different_versions(self, caplog): + """ Tests the case where we want to migrate between two TS-instances with a different + version. A warning should be logged and the version_match flag should be False. 
+ """ + config = deepcopy(CONFIG) + new_url = "http://localhost:8081" + config["new_instance"]["url"] = new_url + self.create_necessary_client_responses(new_url, version=30000) + migrator = self.get_migrator(config) + + warning = list(filter(lambda x: x.levelname == "WARNING" and "version" in x.message, caplog.records)) + assert len(warning) == 1, "Missing warning about version mismatch" + assert not migrator.versions_match, "Flag 'versions_match' should be False" diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..a52e805 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,8 @@ +import re + +def get_project_service_mock(url, service_id): + return re.compile(r'%s/p/foo/%s/.*' % (url, service_id)) + + +def get_global_service_mock(url, service_id): + return re.compile(r'%s/%s/.*' % (url, service_id)) diff --git a/tools/migration/__init__.py b/tools/migration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/blacklist_migrator.py b/tools/migration/blacklist_migrator.py similarity index 93% rename from tools/blacklist_migrator.py rename to tools/migration/blacklist_migrator.py index 1465a27..f8ee243 100755 --- a/tools/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 -from migrator_base import MigratorBase +from migration.migrator_base import MigratorBase, get_arguments def main(): - BlacklistMigrator().migrate() + config, debug = get_arguments() + BlacklistMigrator(config, debug).migrate() class BlacklistMigrator(MigratorBase): diff --git a/tools/config.template b/tools/migration/config.template similarity index 100% rename from tools/config.template rename to tools/migration/config.template diff --git a/tools/migrator_base.py b/tools/migration/migrator_base.py similarity index 80% rename from tools/migrator_base.py rename to tools/migration/migrator_base.py index 17be948..5734073 100644 --- a/tools/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -15,17 +15,46 @@ def get_arguments(): "see config.template.") parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log. Can be use to " "dry-run the migration, as it does not change anything.") - return parser.parse_args() + args = parser.parse_args() + return load_config_json(args.config), args.debug + + +def load_config_json(path): + """ Loads the config data as a JSON and returns it. """ + logger = MigratorBase.logger + config_file = Path(path) + if config_file.exists(): + try: + return json.load(config_file.open()) + except json.JSONDecodeError: + logger.exception("Config file '%s' is malformed" % path, exc_info=True) + else: + logger.exception("Config file '%s' does not exist" % path) + exit(1) + + +def create_logger(): + """ Creates a logger """ + logger = logging.getLogger(__name__) + logger.setLevel(logging.INFO) + handler = logging.StreamHandler() + formatter = logging.Formatter("%(levelname)-8s %(message)s (%(filename)-0s:%(lineno)-0s)") + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger class MigratorBase(ABC): """ Base class for migrating data from one instance to another via REST calls. 
""" + logger = create_logger() - def __init__(self): - args = get_arguments() - self.debug = args.debug - self.logger = self.get_logger() - self.old, self.new = self.load_config(args.config) + def __init__(self, config_data, debug=False): + self.debug = debug + if self.debug: + self.logger.setLevel(logging.DEBUG) + else: + self.logger.setLevel(logging.INFO) + self.old, self.new = self.create_clients(config_data) self.versions_match = self.check_versions() self.migrated = 0 self.cache = {} @@ -33,23 +62,18 @@ def __init__(self): if self.debug: self.logger.debug("Debug Mode ON") - def load_config(self, config_path): + def create_clients(self, config_data): """ Reads the given config defined by its path and creates the two teamscale clients from it. One old instance (migrating from) and a new onoe (migrating to). """ - config_file = Path(config_path) - if config_file.exists(): - try: - data = json.load(config_file.open()) - return self.get_client(data["old_instance"]), self.get_client(data["new_instance"]) - except (json.JSONDecodeError, KeyError): - self.logger.exception("Config file '%s' is malformed" % config_path, exc_info=True) - except ConnectionError as e: - self.logger.exception("Connection to %s could not be established" % e.request.url) - except ServiceError as e: - self.logger.exception("Creating the teamscale clients failed.") - else: - self.logger.exception("Config file '%s' does not exist" % config_path) + try: + return self.get_client(config_data["old_instance"]), self.get_client(config_data["new_instance"]) + except KeyError: + self.logger.exception("Config data is malformed") + except ConnectionError as e: + self.logger.exception("Connection to %s could not be established" % e.request.url) + except ServiceError: + self.logger.exception("Creating the teamscale clients failed.") exit(1) def check_versions(self): @@ -69,18 +93,6 @@ def get_client(data): """ Creates a teamscale client from the given data """ return TeamscaleClient(data["url"], data["user"], data["token"], data["project"]) - def get_logger(self): - """ Creates a logger """ - logger = logging.getLogger(__name__) - logger.setLevel(logging.INFO) - if self.debug: - logger.setLevel(logging.DEBUG) - handler = logging.StreamHandler() - formatter = logging.Formatter("%(levelname)-8s %(message)s") - handler.setFormatter(formatter) - logger.addHandler(handler) - return logger - def check_cache(self, request, use_cache): """ If use_cache is True it checks if the cache already contains the response for the given request and returns it. diff --git a/tools/task_migrator.py b/tools/migration/task_migrator.py similarity index 93% rename from tools/task_migrator.py rename to tools/migration/task_migrator.py index 88f8e52..cf2dcb2 100755 --- a/tools/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,11 +1,12 @@ #!/usr/bin/env python3 -from migrator_base import MigratorBase +from migration.migrator_base import MigratorBase, get_arguments def main(): """ Migrates the task from the old instance to the new one. It automatically reads the arguments from the command line. """ - TaskMigrator().migrate() + (config, debug) = get_arguments() + TaskMigrator(config, debug).migrate() class TaskMigrator(MigratorBase): @@ -74,12 +75,12 @@ def task_exists(self, old_task): return True return False - def task_findings_match(self, new, old): + def task_findings_match(self, new_task, old_task): """ Checks if the findings of the given two tasks are the same. Returns True if they are, False otherwise. 
""" - new_findings = self.get_task_findings(self.new, new) - old_findings = self.get_task_findings(self.old, old) + new_findings = self.get_task_findings(self.new, new_task) + old_findings = self.get_task_findings(self.old, old_task) for old_finding in old_findings: if not self.finding_match_in_list(old_finding, new_findings): From f4f4115c341b7c04e15fc9aaba3caec1f72d64b8 Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 7 Feb 2018 14:34:32 +0100 Subject: [PATCH 15/79] Fixed some findings --- tests/task_migrator_test.py | 5 +++++ tests/test_utils.py | 5 +---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/task_migrator_test.py b/tests/task_migrator_test.py index 8d6ee4b..1137fed 100644 --- a/tests/task_migrator_test.py +++ b/tests/task_migrator_test.py @@ -24,8 +24,13 @@ class TestTaskMigrator: + """ Simple class for bundling the test for the task migration. """ @staticmethod def get_migrator(config): + """ + Returns a task migrator with the given config. + For an example config look at CONFIG + """ TestTaskMigrator.create_necessary_client_responses(URL) return TaskMigrator(config, False) diff --git a/tests/test_utils.py b/tests/test_utils.py index a52e805..bd04b66 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,8 +1,5 @@ import re -def get_project_service_mock(url, service_id): - return re.compile(r'%s/p/foo/%s/.*' % (url, service_id)) - - def get_global_service_mock(url, service_id): + """ Creates a url for a global service with the given url and service """ return re.compile(r'%s/%s/.*' % (url, service_id)) From 3939ac3b9d9f30e28ce7a94c6629183813d9779c Mon Sep 17 00:00:00 2001 From: pawelka Date: Mon, 12 Feb 2018 19:13:50 +0100 Subject: [PATCH 16/79] Changed module name --- tools/migration/blacklist_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index f8ee243..a760e8f 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from migration.migrator_base import MigratorBase, get_arguments +from migrator_base import MigratorBase, get_arguments def main(): From 9e2a569c0e941d47ef9963ba404f7efaa50d2753 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:04:32 +0100 Subject: [PATCH 17/79] Added fallback for missing service call --- .gitignore | 1 + teamscale_client/client.py | 5 ----- tools/migration/migrator_base.py | 16 +++++++++++++--- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.gitignore b/.gitignore index e12944a..fe5ace0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] +.pytest_cache/ # C extensions *.so diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 01f884a..f2ca2c3 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -384,11 +384,6 @@ def get_projects(self): creation_timestamp=x['creationTimestamp'], alias=x.get('alias'), deleting=x['deleting'], reanalyzing=x['reanalyzing']) for x in response.json()] - def get_version(self): - """ Retrieves the teamscale version """ - response_text = self.get(self.get_global_service_url("health-metrics"), {"metric": "version"}).text - return response_text.split()[1] - def create_project(self, project_configuration): """Creates a project with the specified configuration in Teamscale. 
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 5734073..218a2bc 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -80,14 +80,25 @@ def check_versions(self): """ Checks if the versions of both clients match. If not False will be returned and a warning will be logged. """ - old_version = self.old.get_version() - new_version = self.new.get_version() + old_version = self.get_teamscale_version(self.old) + new_version = self.get_teamscale_version(self.new) + if old_version != new_version: self.logger.warning("Teamscale versions of the old (%s) and new (%s) instance differ!" % (old_version, new_version)) return False return True + def get_teamscale_version(self, client): + """ Retrieves the teamscale version or 'unknown' if the version could not be fetched. """ + try: + response_text = client.get(client.get_global_service_url("health-metrics"), {"metric": "version"}).text + except ServiceError as e: + self.logger.warning("Unable to fetch teamscale version for %s " + "(Version too old?) (Reason: %s)" % (client.url, e.response.status_code)) + return "unknown" + return response_text.split()[1] + @staticmethod def get_client(data): """ Creates a teamscale client from the given data """ @@ -124,7 +135,6 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): response = client.get(url, parameters).json() except ServiceError as e: self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) - exit(1) self.cache_request((url, parameters), response, use_cache) return response From 2bb2f3491a42941b503452d7b1de2d2f42ab9a9f Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:07:25 +0100 Subject: [PATCH 18/79] Disabled api check. Not working for 3.6 even though 3.2 should be fine --- teamscale_client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index f2ca2c3..31e13a5 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -37,7 +37,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch - self.check_api_version() + #self.check_api_version() def check_api_version(self): """Verifies the server's api version and connectivity. From 3a9821bda391cd47e7a8f91a27c67224d09c8559 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:42:53 +0100 Subject: [PATCH 19/79] Revert Change --- teamscale_client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 31e13a5..f2ca2c3 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -37,7 +37,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch - #self.check_api_version() + self.check_api_version() def check_api_version(self): """Verifies the server's api version and connectivity. 
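The fallback introduced here makes a missing version service non-fatal: instead of crashing against an old server, the migrator reports "unknown" and carries on. A self-contained sketch of the pattern, with stand-ins for the client call and for teamscale_client's ServiceError:

class ServiceError(Exception):  # stand-in for teamscale_client.data.ServiceError
    pass

def get_teamscale_version(fetch_metrics):
    try:
        return fetch_metrics().split()[1]
    except ServiceError:
        return "unknown"

print(get_teamscale_version(lambda: "version 40000 0"))  # 40000

def old_server():
    raise ServiceError("service not available")

print(get_teamscale_version(old_server))  # unknown
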
From c858ef6d1784dd57ff87acf14ed6460ec446c2dc Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:48:16 +0100 Subject: [PATCH 20/79] Changed module import --- tools/migration/task_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index cf2dcb2..c9f61cb 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from migration.migrator_base import MigratorBase, get_arguments +from migrator_base import MigratorBase, get_arguments def main(): From 9b5aa1e5ad857dd4b63c254938241a4677690eaa Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:50:45 +0100 Subject: [PATCH 21/79] Revert "Changed module import" This reverts commit 4b0df559179178ef75f857dbb1983d675cbde44f. --- tools/migration/task_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index c9f61cb..cf2dcb2 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from migrator_base import MigratorBase, get_arguments +from migration.migrator_base import MigratorBase, get_arguments def main(): From 7f093eae9d1dd1d14db04bc6e355c2231205ea3c Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:50:58 +0100 Subject: [PATCH 22/79] Revert "Revert Change" This reverts commit eee05e6be74b585990f4d4fc339831a0c07b8aec. --- teamscale_client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index f2ca2c3..31e13a5 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -37,7 +37,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch - self.check_api_version() + #self.check_api_version() def check_api_version(self): """Verifies the server's api version and connectivity. From 91dd8b2e0d27af026e06176d1909d0b74e555925 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:51:00 +0100 Subject: [PATCH 23/79] Revert "Disabled api check. Not working for 3.6 even though 3.2 should be fine" This reverts commit 99f40667210f47a4cf500a4b9da8ba3bef667d89. --- teamscale_client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 31e13a5..f2ca2c3 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -37,7 +37,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch - #self.check_api_version() + self.check_api_version() def check_api_version(self): """Verifies the server's api version and connectivity. From f0252209c710036b571883ac95febf29ea8e47d3 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:51:02 +0100 Subject: [PATCH 24/79] Revert "Added fallback for missing service call" This reverts commit 55ffb8154fd3032ceb007a4f87340ee204be8d8a. 
--- .gitignore | 1 - teamscale_client/client.py | 5 +++++ tools/migration/migrator_base.py | 16 +++------------- 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/.gitignore b/.gitignore index fe5ace0..e12944a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] -.pytest_cache/ # C extensions *.so diff --git a/teamscale_client/client.py b/teamscale_client/client.py index f2ca2c3..01f884a 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -384,6 +384,11 @@ def get_projects(self): creation_timestamp=x['creationTimestamp'], alias=x.get('alias'), deleting=x['deleting'], reanalyzing=x['reanalyzing']) for x in response.json()] + def get_version(self): + """ Retrieves the teamscale version """ + response_text = self.get(self.get_global_service_url("health-metrics"), {"metric": "version"}).text + return response_text.split()[1] + def create_project(self, project_configuration): """Creates a project with the specified configuration in Teamscale. diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 218a2bc..5734073 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -80,25 +80,14 @@ def check_versions(self): """ Checks if the versions of both clients match. If not False will be returned and a warning will be logged. """ - old_version = self.get_teamscale_version(self.old) - new_version = self.get_teamscale_version(self.new) - + old_version = self.old.get_version() + new_version = self.new.get_version() if old_version != new_version: self.logger.warning("Teamscale versions of the old (%s) and new (%s) instance differ!" % (old_version, new_version)) return False return True - def get_teamscale_version(self, client): - """ Retrieves the teamscale version or 'unknown' if the version could not be fetched. """ - try: - response_text = client.get(client.get_global_service_url("health-metrics"), {"metric": "version"}).text - except ServiceError as e: - self.logger.warning("Unable to fetch teamscale version for %s " - "(Version too old?) 
(Reason: %s)" % (client.url, e.response.status_code)) - return "unknown" - return response_text.split()[1] - @staticmethod def get_client(data): """ Creates a teamscale client from the given data """ @@ -135,6 +124,7 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): response = client.get(url, parameters).json() except ServiceError as e: self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) + exit(1) self.cache_request((url, parameters), response, use_cache) return response From 08232c941db835953c7a73149762bf6afcaa33aa Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 10:53:06 +0100 Subject: [PATCH 25/79] Changed import and added pytest cache to gitignore --- .gitignore | 1 + tools/migration/task_migrator.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index e12944a..fe5ace0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] +.pytest_cache/ # C extensions *.so diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index cf2dcb2..c9f61cb 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -from migration.migrator_base import MigratorBase, get_arguments +from migrator_base import MigratorBase, get_arguments def main(): From 24d5cf06eee620da8c218e38c146dd191d579587 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:05:48 +0100 Subject: [PATCH 26/79] Added handling of missing findings --- tools/migration/migrator_base.py | 23 ++++++++++++++++++++--- tools/migration/task_migrator.py | 5 ++++- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 5734073..f8fd287 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -123,8 +123,11 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): self.logger.debug("Service Call: {}".format((url, parameters))) response = client.get(url, parameters).json() except ServiceError as e: - self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) - exit(1) + status_code = e.response.status_code + if status_code in (500, 404): + self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) + elif status_code == 400: + raise self.cache_request((url, parameters), response, use_cache) return response @@ -160,13 +163,27 @@ def get_matching_finding_id(self, finding_id): the given findings id of the old instance. If no match could be found `None` is returned. """ - finding = self.get_from_old("findings-by-id", path_suffix=finding_id) + finding = self.get_finding_by_id(self.old, finding_id) + if finding is None: + return None + new_findings = self.get_from_new("findings", path_suffix=finding["location"]["uniformPath"]) for new_finding in new_findings: if self.match_finding(new_finding, finding): return new_finding["id"] return None + def get_finding_by_id(self, client, finding_id): + """ Returns the finding with the specified id from the given client. + If no finding with that ID can be found `None` is returned. + """ + try: + return client.get("findings-by-id", path_suffix=finding_id) + except ServiceError as e: + if e.response.status_code == 400: + self.logger.info("Finding with id %s not found. Skipping." 
% finding_id) + return None + def get_findings_url(self, findings_id, client=None): """ Creates a url link to the finding with the given id on the given Teamscale """ if client is None: diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index c9f61cb..f49d28e 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -109,7 +109,10 @@ def get_task_findings(self, client, task): """ Returns the findings objects for a task (if it has any) """ findings = [] for entry in task["findings"]: - findings.append(self.get(client, "findings-by-id", path_suffix=entry["findingId"])) + finding = self.get_finding_by_id(client, entry["findingId"]) + if finding is None: + continue + findings.append(finding) return findings From 1d5625d27cfa71a852e02a8e4ae0b3a0f8101236 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:08:54 +0100 Subject: [PATCH 27/79] Fixed bug --- tools/migration/migrator_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index f8fd287..815ca04 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -178,7 +178,7 @@ def get_finding_by_id(self, client, finding_id): If no finding with that ID can be found `None` is returned. """ try: - return client.get("findings-by-id", path_suffix=finding_id) + return self.get(client, "findings-by-id", path_suffix=finding_id) except ServiceError as e: if e.response.status_code == 400: self.logger.info("Finding with id %s not found. Skipping." % finding_id) From 0776b88b306295f1691166b21bdea3b0b194cffc Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:23:55 +0100 Subject: [PATCH 28/79] Added a dry-run option --- tools/migration/blacklist_migrator.py | 3 +-- tools/migration/migrator_base.py | 14 ++++++++------ tools/migration/task_migrator.py | 3 +-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index a760e8f..9814497 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -3,8 +3,7 @@ def main(): - config, debug = get_arguments() - BlacklistMigrator(config, debug).migrate() + BlacklistMigrator(*get_arguments()).migrate() class BlacklistMigrator(MigratorBase): diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 815ca04..61cdedf 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -13,10 +13,11 @@ def get_arguments(): parser = argparse.ArgumentParser(description="test", formatter_class=argparse.RawTextHelpFormatter) parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, " "see config.template.") - parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log. Can be use to " - "dry-run the migration, as it does not change anything.") + parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log.") + parser.add_argument("--dry-run", action="store_true", help="Dry-run for the migration. See what would happen " + "without any consequences") args = parser.parse_args() - return load_config_json(args.config), args.debug + return load_config_json(args.config), args.debug, args.dry_run def load_config_json(path): @@ -48,12 +49,13 @@ class MigratorBase(ABC): """ Base class for migrating data from one instance to another via REST calls. 
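After this change the entry points shrink to Migrator(*get_arguments()).migrate(). A runnable sketch of the argument plumbing; note that argparse exposes --dry-run as the attribute dry_run:

import argparse

def get_arguments(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument("config", help="path to the config file")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--dry-run", action="store_true")
    args = parser.parse_args(argv)
    return args.config, args.debug, args.dry_run

config, debug, dry_run = get_arguments(["config.json", "--dry-run"])
print(config, debug, dry_run)  # config.json False True
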
""" logger = create_logger() - def __init__(self, config_data, debug=False): + def __init__(self, config_data, debug=False, dry_run=False): self.debug = debug if self.debug: self.logger.setLevel(logging.DEBUG) else: self.logger.setLevel(logging.INFO) + self.dry_run = dry_run self.old, self.new = self.create_clients(config_data) self.versions_match = self.check_versions() self.migrated = 0 @@ -153,7 +155,7 @@ def put_in_new(self, service, data, path_suffix="", parameters=None): Args: path_suffix(str): Will be added to the end of the project service URL """ - if not self.debug: + if not self.dry_run: self.new.put(self.new.get_project_service_url(service) + path_suffix, parameters=parameters, json=data) @@ -181,7 +183,7 @@ def get_finding_by_id(self, client, finding_id): return self.get(client, "findings-by-id", path_suffix=finding_id) except ServiceError as e: if e.response.status_code == 400: - self.logger.info("Finding with id %s not found. Skipping." % finding_id) + self.logger.debug("Finding with id %s not found. Skipping." % finding_id) return None def get_findings_url(self, findings_id, client=None): diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index f49d28e..7192872 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -5,8 +5,7 @@ def main(): """ Migrates the task from the old instance to the new one. It automatically reads the arguments from the command line. """ - (config, debug) = get_arguments() - TaskMigrator(config, debug).migrate() + TaskMigrator(*get_arguments()).migrate() class TaskMigrator(MigratorBase): From dd2b5242569901ea32dc7769a42721dae529ce78 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:34:36 +0100 Subject: [PATCH 29/79] Added more logs --- tools/migration/blacklist_migrator.py | 10 ++++++---- tools/migration/task_migrator.py | 12 ++++++++---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index 9814497..b2f3264 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -14,6 +14,11 @@ class BlacklistMigrator(MigratorBase): def migrate(self): """ Migrates the blacklist. """ blacklist_infos = self.get_blacklist_infos() + if len(blacklist_infos) == 0: + self.logger.info("No new blacklisted findings to migrate") + exit(1) + + self.logger.info("Migrating %s blacklisted findings" % len(blacklist_infos)) for blacklist_info in blacklist_infos: old_id = blacklist_info["findingId"] new_id = self.get_matching_finding_id(old_id) @@ -24,10 +29,7 @@ def migrate(self): self.logger.info("Migrating blacklisted finding %s" % self.get_findings_url(old_id)) self.blacklist_finding(blacklist_info, new_id) - if len(blacklist_infos) == 0: - self.logger.info("No new blacklisted findings to migrate") - else: - self.logger.info("Migrated %d/%d blacklisted findings" % (self.migrated, len(blacklist_infos))) + self.logger.info("Migrated %d/%d blacklisted findings" % (self.migrated, len(blacklist_infos))) def get_blacklist_infos(self): """ Returns all blacklist info objects from the old instance. """ diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 7192872..0966da0 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -15,16 +15,20 @@ class TaskMigrator(MigratorBase): def migrate(self): """ Migrates the tasks. 
""" old_tasks = self.get_filtered_tasks() + if len(old_tasks) == 0: + self.logger.info("No new tasks to migrate.") + exit(1) + + self.logger.info("Migrating %s tasks" % len(old_tasks)) for old_task in old_tasks: old_task_id = old_task["id"] if self.adjust_task(old_task): self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) self.add_task(old_task) + else: + self.logger.warning("Task %s could not be migrated" % self.get_tasks_url(old_task_id)) - if len(old_tasks) == 0: - self.logger.info("No new tasks to migrate.") - else: - self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) + self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) def get_filtered_tasks(self): """ Returns a list comprising of the tasks of the old instance which are not yet From 7d77d8222d30e0a4f4407b67a320dfed01cb67f1 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:39:52 +0100 Subject: [PATCH 30/79] Added logging level print --- tools/migration/migrator_base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 61cdedf..aa88fa7 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -55,6 +55,7 @@ def __init__(self, config_data, debug=False, dry_run=False): self.logger.setLevel(logging.DEBUG) else: self.logger.setLevel(logging.INFO) + print("Logging level: %s" % self.logger.getEffectiveLevel()) self.dry_run = dry_run self.old, self.new = self.create_clients(config_data) self.versions_match = self.check_versions() From 1e401f9bd4cbd35224b5a8af3f4778525af4e412 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:49:49 +0100 Subject: [PATCH 31/79] Added more logging --- tools/migration/task_migrator.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 0966da0..d91bdca 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -34,8 +34,17 @@ def get_filtered_tasks(self): """ Returns a list comprising of the tasks of the old instance which are not yet migrated to the new instance. """ + self.logger.info("Checking if tasks have already been migrated (Might take a while).") old_tasks = self.get_from_old("tasks", parameters={"details": True}) - return list(filter(lambda task: not self.task_exists(task), old_tasks)) + not_migrated = [] + for task in old_tasks: + task_url = self.get_tasks_url(task) + self.logger.info("Checking for Task %s" % task_url) + if self.task_exists(task): + self.logger.info("Task %s has already been migrated." 
% task_url) + else: + not_migrated.append(task) + return not_migrated def adjust_task(self, task): """ Before adding the task to the new instance the ids of any connected findings need From 31ff7f0aca73572794cefd7b3a8b9cb785168c1a Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:51:04 +0100 Subject: [PATCH 32/79] Fixed id --- tools/migration/task_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index d91bdca..107ea0b 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -38,7 +38,7 @@ def get_filtered_tasks(self): old_tasks = self.get_from_old("tasks", parameters={"details": True}) not_migrated = [] for task in old_tasks: - task_url = self.get_tasks_url(task) + task_url = self.get_tasks_url(task["id"]) self.logger.info("Checking for Task %s" % task_url) if self.task_exists(task): self.logger.info("Task %s has already been migrated." % task_url) From 81f305bc92fa2b64f8a2231ce34daddc2db6af35 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:52:31 +0100 Subject: [PATCH 33/79] Logging --- tools/migration/task_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 107ea0b..3afb8ed 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -34,8 +34,8 @@ def get_filtered_tasks(self): """ Returns a list comprising of the tasks of the old instance which are not yet migrated to the new instance. """ - self.logger.info("Checking if tasks have already been migrated (Might take a while).") old_tasks = self.get_from_old("tasks", parameters={"details": True}) + self.logger.info("Checking %s tasks, if some have already been migrated (Might take a while)." 
% len(old_tasks)) not_migrated = [] for task in old_tasks: task_url = self.get_tasks_url(task["id"]) From 759234e46e631262b06ca002701386682aebf85e Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 11:54:10 +0100 Subject: [PATCH 34/79] Modified logging --- tools/migration/migrator_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index aa88fa7..16052d6 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -39,7 +39,7 @@ def create_logger(): logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) handler = logging.StreamHandler() - formatter = logging.Formatter("%(levelname)-8s %(message)s (%(filename)-0s:%(lineno)-0s)") + formatter = logging.Formatter("%(levelname)-8s %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) return logger From 6282c8e1df64afcc9fe879d0da82f34f15471fb8 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 12:34:12 +0100 Subject: [PATCH 35/79] Fixed wrong method call --- tools/migration/blacklist_migrator.py | 2 +- tools/migration/task_migrator.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index b2f3264..82d2aee 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -24,7 +24,7 @@ def migrate(self): new_id = self.get_matching_finding_id(old_id) if new_id is None: self.logger.warning("Could not match finding %s to new instance" % - self.get_findings_url(self.old, old_id)) + self.get_findings_url(old_id)) else: self.logger.info("Migrating blacklisted finding %s" % self.get_findings_url(old_id)) self.blacklist_finding(blacklist_info, new_id) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 3afb8ed..5e572e4 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -55,7 +55,7 @@ def adjust_task(self, task): matching_finding_id = self.get_matching_finding_id(finding["findingId"]) if matching_finding_id is None: self.logger.warn("The finding %s for the task %s does not exists on the new instance." 
% ( - self.get_findings_url(self.old, finding["findingId"]), task["id"])) + self.get_findings_url(finding["findingId"]), task["id"])) return False finding["findingId"] = matching_finding_id # If the id is 0, the backend will assign a valid new id From ddd2a378af66c7e0d01feb96a66d1ab4dbfba29c Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 13:05:01 +0100 Subject: [PATCH 36/79] Changed the task migration --- tools/migration/task_migrator.py | 58 +++++++------------------------- 1 file changed, 13 insertions(+), 45 deletions(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 5e572e4..36e0224 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -22,11 +22,9 @@ def migrate(self): self.logger.info("Migrating %s tasks" % len(old_tasks)) for old_task in old_tasks: old_task_id = old_task["id"] - if self.adjust_task(old_task): - self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) - self.add_task(old_task) - else: - self.logger.warning("Task %s could not be migrated" % self.get_tasks_url(old_task_id)) + self.adjust_task(old_task) + self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) + self.add_task(old_task) self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) @@ -35,12 +33,12 @@ def get_filtered_tasks(self): migrated to the new instance. """ old_tasks = self.get_from_old("tasks", parameters={"details": True}) - self.logger.info("Checking %s tasks, if some have already been migrated (Might take a while)." % len(old_tasks)) + self.logger.info("Checking %s tasks, if some have already been migrated." % len(old_tasks)) not_migrated = [] for task in old_tasks: task_url = self.get_tasks_url(task["id"]) self.logger.info("Checking for Task %s" % task_url) - if self.task_exists(task): + if self.task_migrated(task): self.logger.info("Task %s has already been migrated." % task_url) else: not_migrated.append(task) @@ -49,17 +47,15 @@ def get_filtered_tasks(self): def adjust_task(self, task): """ Before adding the task to the new instance the ids of any connected findings need to be changed to the corresponding findings on the new instance. - If any finding cannot be matched on the new instance `False` will be returned, `True` otherwise. + If the adjusting could not be done `False` will be returned, `True` otherwise. """ for finding in task["findings"]: matching_finding_id = self.get_matching_finding_id(finding["findingId"]) if matching_finding_id is None: self.logger.warn("The finding %s for the task %s does not exists on the new instance." % ( self.get_findings_url(finding["findingId"]), task["id"])) - return False - finding["findingId"] = matching_finding_id - # If the id is 0, the backend will assign a valid new id - task["id"] = 0 + else: + finding["findingId"] = matching_finding_id return True def get_tasks_url(self, task_id, client=None): @@ -73,38 +69,10 @@ def add_task(self, task): self.migrated += 1 self.put_in_new("tasks", path_suffix=str(task["id"]), data=task) - def task_exists(self, old_task): - """ Checks if the given tasks already exists on the new instance. 
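The per-finding comparison above gives way to a much cheaper check: a handful of fields plus the number of attached findings. A self-contained sketch of the new logic with illustrative sample tasks:

def task_to_list(task):
    return [task[x] for x in ["subject", "description", "author", "assignee", "tags"]]

def superficial_match(task1, task2):
    fields_match = task_to_list(task1) == task_to_list(task2)
    same_number_of_findings = len(task1["findings"]) == len(task2["findings"])
    return fields_match and same_number_of_findings

a = {"subject": "s", "description": "d", "author": "x", "assignee": "y",
     "tags": ["t"], "findings": [{"findingId": "1"}]}
b = dict(a, findings=[])
print(superficial_match(a, a), superficial_match(a, b))  # True False
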
""" - new_tasks = self.get_from_new("tasks", parameters={ - "author": old_task["author"], - "assignee": old_task["assignee"], - "tags": old_task["tags"], - "details": True - }) - - for new_task in new_tasks: - if self.superficial_match(new_task, old_task) and self.task_findings_match(new_task, old_task): - return True - return False - - def task_findings_match(self, new_task, old_task): - """ Checks if the findings of the given two tasks are the same. - Returns True if they are, False otherwise. - """ - new_findings = self.get_task_findings(self.new, new_task) - old_findings = self.get_task_findings(self.old, old_task) - - for old_finding in old_findings: - if not self.finding_match_in_list(old_finding, new_findings): - return False - return True - - def finding_match_in_list(self, finding, finding_list): - """ Checks whether there is a match for a finding in a list of findings. """ - for new_finding in finding_list: - if self.match_finding(finding, new_finding): - return True - return False + def task_migrated(self, old_task): + """ Checks if the given task was already migrated to the new instance. """ + new_task = self.get_from_new("tasks", path_suffix=old_task["id"]) + return self.superficial_match(new_task, old_task) def superficial_match(self, task1, task2): """ A quick check if two tasks are roughly the same. It checks the contents of some fields and @@ -115,7 +83,7 @@ def superficial_match(self, task1, task2): @staticmethod def task_to_list(task): """ Creates a simple string out of task with some of its field values. """ - return [task[x] for x in ["subject", "description"]] + return [task[x] for x in ["subject", "description", "author", "assignee", "tags"]] def get_task_findings(self, client, task): """ Returns the findings objects for a task (if it has any) """ From 623b54b8771d6818aaf417ee0f5a380f1e0192eb Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 13:06:30 +0100 Subject: [PATCH 37/79] Stringified path suffix --- tools/migration/migrator_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 16052d6..0cc4a4f 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -118,7 +118,7 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): Args: path_suffix(str): Will be added to the end of the project service URL """ - url = client.get_project_service_url(service) + path_suffix + url = client.get_project_service_url(service) + str(path_suffix) response = self.check_cache((url, parameters), use_cache) if response is None: From 086c7c29db0a1b4685e24f09004f1b286e0061cd Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 13:34:03 +0100 Subject: [PATCH 38/79] Changed the task migration --- tools/migration/migrator_base.py | 4 ++-- tools/migration/task_migrator.py | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 0cc4a4f..b130e48 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -127,9 +127,9 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): response = client.get(url, parameters).json() except ServiceError as e: status_code = e.response.status_code - if status_code in (500, 404): + if status_code == 500: self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) - elif status_code == 400: + elif status_code in (400, 
404): raise self.cache_request((url, parameters), response, use_cache) return response diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 36e0224..8630580 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 from migrator_base import MigratorBase, get_arguments +from teamscale_client.data import ServiceError def main(): @@ -47,7 +48,6 @@ def get_filtered_tasks(self): def adjust_task(self, task): """ Before adding the task to the new instance the ids of any connected findings need to be changed to the corresponding findings on the new instance. - If the adjusting could not be done `False` will be returned, `True` otherwise. """ for finding in task["findings"]: matching_finding_id = self.get_matching_finding_id(finding["findingId"]) @@ -56,7 +56,6 @@ def adjust_task(self, task): self.get_findings_url(finding["findingId"]), task["id"])) else: finding["findingId"] = matching_finding_id - return True def get_tasks_url(self, task_id, client=None): """ Creates a url of the old instance to the task with the given id. """ @@ -71,7 +70,10 @@ def add_task(self, task): def task_migrated(self, old_task): """ Checks if the given task was already migrated to the new instance. """ - new_task = self.get_from_new("tasks", path_suffix=old_task["id"]) + try: + new_task = self.get_from_new("tasks", path_suffix=old_task["id"])["task"] + except ServiceError: + return False return self.superficial_match(new_task, old_task) def superficial_match(self, task1, task2): From b3133eebbd514be7ecd2cf1d5c4a82b0451081f5 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 13:52:11 +0100 Subject: [PATCH 39/79] Changed comparison of tasks --- tools/migration/task_migrator.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 8630580..3aa9eff 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -80,7 +80,9 @@ def superficial_match(self, task1, task2): """ A quick check if two tasks are roughly the same. It checks the contents of some fields and the number of findings. """ - return self.task_to_list(task1) == self.task_to_list(task2) and len(task1["findings"]) == len(task2["findings"]) + fields_match = self.task_to_list(task1) == self.task_to_list(task2) + same_number_of_findings = len(task1["findings"]) == len(task2["findings"]) + return fields_match and same_number_of_findings @staticmethod def task_to_list(task): From 8e07e8f25eaba58759dc2ea278412563be7b7b8e Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 13:56:43 +0100 Subject: [PATCH 40/79] Removed task pre-filtering --- tools/migration/task_migrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 3aa9eff..1120186 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -15,7 +15,7 @@ class TaskMigrator(MigratorBase): """ def migrate(self): """ Migrates the tasks. 
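The rewritten existence check is EAFP style: ask the new instance for the task by id and treat a ServiceError as "not migrated yet". A sketch with stand-ins for the client call and the exception type; the ["task"] unwrapping mirrors the response shape assumed in the patch:

class ServiceError(Exception):  # stand-in for teamscale_client.data.ServiceError
    pass

def task_migrated(fetch_task, old_task):
    try:
        new_task = fetch_task(old_task["id"])["task"]
    except ServiceError:
        return False
    return new_task["subject"] == old_task["subject"]  # stand-in for the full match

def fetch_task(task_id):
    raise ServiceError("task %s not found" % task_id)

print(task_migrated(fetch_task, {"id": 7, "subject": "fix"}))  # False
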
""" - old_tasks = self.get_filtered_tasks() + old_tasks = self.get_from_old("tasks", parameters={"details": True}) if len(old_tasks) == 0: self.logger.info("No new tasks to migrate.") exit(1) From f702e91e253ad3e9686ae2e99f6f6e3aaf61313b Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 14:18:21 +0100 Subject: [PATCH 41/79] Added the step by step option --- tools/migration/blacklist_migrator.py | 1 + tools/migration/migrator_base.py | 13 +++++++-- tools/migration/task_migrator.py | 39 +-------------------------- 3 files changed, 13 insertions(+), 40 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index 82d2aee..6358010 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -28,6 +28,7 @@ def migrate(self): else: self.logger.info("Migrating blacklisted finding %s" % self.get_findings_url(old_id)) self.blacklist_finding(blacklist_info, new_id) + self.check_step() self.logger.info("Migrated %d/%d blacklisted findings" % (self.migrated, len(blacklist_infos))) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index b130e48..d08dd7c 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -16,8 +16,11 @@ def get_arguments(): parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log.") parser.add_argument("--dry-run", action="store_true", help="Dry-run for the migration. See what would happen " "without any consequences") + parser.add_argument("--step-by-step", action="store_true", help="Pauses between each migration. Can be used to " + "check for potential error without tainting " + "the complete data.") args = parser.parse_args() - return load_config_json(args.config), args.debug, args.dry_run + return load_config_json(args.config), args.debug, args.dry_run, args.step_by_step def load_config_json(path): @@ -49,7 +52,7 @@ class MigratorBase(ABC): """ Base class for migrating data from one instance to another via REST calls. """ logger = create_logger() - def __init__(self, config_data, debug=False, dry_run=False): + def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False): self.debug = debug if self.debug: self.logger.setLevel(logging.DEBUG) @@ -57,6 +60,7 @@ def __init__(self, config_data, debug=False, dry_run=False): self.logger.setLevel(logging.INFO) print("Logging level: %s" % self.logger.getEffectiveLevel()) self.dry_run = dry_run + self.step_by_step = step_by_step self.old, self.new = self.create_clients(config_data) self.versions_match = self.check_versions() self.migrated = 0 @@ -79,6 +83,11 @@ def create_clients(self, config_data): self.logger.exception("Creating the teamscale clients failed.") exit(1) + def check_step(self): + """ Checks if there should be a pause. """ + if self.step_by_step: + input("click to continue...") + def check_versions(self): """ Checks if the versions of both clients match. If not False will be returned and a warning will be logged. 
diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 1120186..a33a0bc 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -26,25 +26,9 @@ def migrate(self): self.adjust_task(old_task) self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) self.add_task(old_task) - + self.check_step() self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) - def get_filtered_tasks(self): - """ Returns a list comprising of the tasks of the old instance which are not yet - migrated to the new instance. - """ - old_tasks = self.get_from_old("tasks", parameters={"details": True}) - self.logger.info("Checking %s tasks, if some have already been migrated." % len(old_tasks)) - not_migrated = [] - for task in old_tasks: - task_url = self.get_tasks_url(task["id"]) - self.logger.info("Checking for Task %s" % task_url) - if self.task_migrated(task): - self.logger.info("Task %s has already been migrated." % task_url) - else: - not_migrated.append(task) - return not_migrated - def adjust_task(self, task): """ Before adding the task to the new instance the ids of any connected findings need to be changed to the corresponding findings on the new instance. @@ -68,27 +52,6 @@ def add_task(self, task): self.migrated += 1 self.put_in_new("tasks", path_suffix=str(task["id"]), data=task) - def task_migrated(self, old_task): - """ Checks if the given task was already migrated to the new instance. """ - try: - new_task = self.get_from_new("tasks", path_suffix=old_task["id"])["task"] - except ServiceError: - return False - return self.superficial_match(new_task, old_task) - - def superficial_match(self, task1, task2): - """ A quick check if two tasks are roughly the same. It checks the contents of some fields and - the number of findings. - """ - fields_match = self.task_to_list(task1) == self.task_to_list(task2) - same_number_of_findings = len(task1["findings"]) == len(task2["findings"]) - return fields_match and same_number_of_findings - - @staticmethod - def task_to_list(task): - """ Creates a simple string out of task with some of its field values. 
""" - return [task[x] for x in ["subject", "description", "author", "assignee", "tags"]] - def get_task_findings(self, client, task): """ Returns the findings objects for a task (if it has any) """ findings = [] From 95c78a42b9dd158f86165e7a7a26d8de34024a14 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 14:33:25 +0100 Subject: [PATCH 42/79] Removing print statement --- tools/migration/migrator_base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index d08dd7c..7625185 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -58,7 +58,6 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False): self.logger.setLevel(logging.DEBUG) else: self.logger.setLevel(logging.INFO) - print("Logging level: %s" % self.logger.getEffectiveLevel()) self.dry_run = dry_run self.step_by_step = step_by_step self.old, self.new = self.create_clients(config_data) From 10aa9562000a7c4d2cc6b2354153e476cc684bb3 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 14:40:08 +0100 Subject: [PATCH 43/79] Removed dead code and added a few comments --- tools/migration/blacklist_migrator.py | 1 + tools/migration/migrator_base.py | 18 +++++++++++++++++- tools/migration/task_migrator.py | 13 +------------ 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index 6358010..8ae70f5 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -3,6 +3,7 @@ def main(): + """ Starts the migration of blacklisted findings """ BlacklistMigrator(*get_arguments()).migrate() diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 7625185..baf121d 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -83,7 +83,7 @@ def create_clients(self, config_data): exit(1) def check_step(self): - """ Checks if there should be a pause. """ + """ If the step by step option is enabled the script pauses with this method. """ if self.step_by_step: input("click to continue...") @@ -124,7 +124,12 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): """ Performs a GET call from the client to the service with the given parameters and returns the response as a JSON Object. Args: + client(TeamscaleClient): Teamscale client + service(str): Id of the service path_suffix(str): Will be added to the end of the project service URL + parameters(dict): Dict with parameters which should be appended to the URL + use_cache(bool): If true, the call will be cached and subsequently similar calls will get the same + response """ url = client.get_project_service_url(service) + str(path_suffix) @@ -146,7 +151,11 @@ def get_from_old(self, service, path_suffix="", parameters=None, use_cache=True) """ Performs a GET call with the given information on the instance from which the data should be migrated and returns the response as a JSON Object. 
Args: + service(str): Id of the service path_suffix(str): Will be added to the end of the project service URL + parameters(dict): Dict with parameters which should be appended to the URL + use_cache(bool): If true, the call will be cached and subsequently similar calls will get the same + response """ return self.get(self.old, service, path_suffix, parameters, use_cache) @@ -154,7 +163,11 @@ def get_from_new(self, service, path_suffix="", parameters=None, use_cache=True) """ Performs a GET call with the given information on the instance to which the data should be migrated and returns the response as a JSON Object. Args: + service(str): Id of the service path_suffix(str): Will be added to the end of the project service URL + parameters(dict): Dict with parameters which should be appended to the URL + use_cache(bool): If true, the call will be cached and subsequently similar calls will get the same + response """ return self.get(self.new, service, path_suffix, parameters, use_cache) @@ -162,7 +175,10 @@ def put_in_new(self, service, data, path_suffix="", parameters=None): """ Performs a PUT call from the client to the service with the given parameters and data. The path parameter are additions to the path, e.g /service/id. Args: + service(str): Id of the service + data(dict): Data which will be converted to JSON and sent to the server path_suffix(str): Will be added to the end of the project service URL + parameters(dict): Dict with parameters which should be appended to the URL """ if not self.dry_run: self.new.put(self.new.get_project_service_url(service) + path_suffix, diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index a33a0bc..5e68586 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 from migrator_base import MigratorBase, get_arguments -from teamscale_client.data import ServiceError def main(): @@ -36,7 +35,7 @@ def adjust_task(self, task): for finding in task["findings"]: matching_finding_id = self.get_matching_finding_id(finding["findingId"]) if matching_finding_id is None: - self.logger.warn("The finding %s for the task %s does not exists on the new instance." % ( + self.logger.warn("The finding %s for task %s does not exists on the new instance." 
% ( self.get_findings_url(finding["findingId"]), task["id"])) else: finding["findingId"] = matching_finding_id @@ -52,16 +51,6 @@ def add_task(self, task): self.migrated += 1 self.put_in_new("tasks", path_suffix=str(task["id"]), data=task) - def get_task_findings(self, client, task): - """ Returns the findings objects for a task (if it has any) """ - findings = [] - for entry in task["findings"]: - finding = self.get_finding_by_id(client, entry["findingId"]) - if finding is None: - continue - findings.append(finding) - return findings - if __name__ == "__main__": main() From 254954b2964511246f4a8c2c4dfd64877b82bd48 Mon Sep 17 00:00:00 2001 From: pawelka Date: Tue, 13 Feb 2018 14:59:55 +0100 Subject: [PATCH 44/79] Added a check if a project exists --- tools/migration/blacklist_migrator.py | 9 ++------- tools/migration/migrator_base.py | 12 ++++++++++++ tools/migration/task_migrator.py | 1 + 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py index 8ae70f5..f97fb1b 100755 --- a/tools/migration/blacklist_migrator.py +++ b/tools/migration/blacklist_migrator.py @@ -24,8 +24,7 @@ def migrate(self): old_id = blacklist_info["findingId"] new_id = self.get_matching_finding_id(old_id) if new_id is None: - self.logger.warning("Could not match finding %s to new instance" % - self.get_findings_url(old_id)) + self.logger.warning("Could not match finding %s to new instance" % self.get_findings_url(old_id)) else: self.logger.info("Migrating blacklisted finding %s" % self.get_findings_url(old_id)) self.blacklist_finding(blacklist_info, new_id) @@ -36,11 +35,7 @@ def migrate(self): def get_blacklist_infos(self): """ Returns all blacklist info objects from the old instance. """ # Remove findings which have already been migrated and have the same id - blacklisted_ids = set(self.get_from_old("finding-blacklist")) - if len(blacklisted_ids) == 0: - self.logger.info("Old instance does not have any blacklisted findings") - exit(1) - blacklisted_ids -= set(self.get_from_new("finding-blacklist")) + blacklisted_ids = set(self.get_from_old("finding-blacklist")) - set(self.get_from_new("finding-blacklist")) infos = [] for finding_id in blacklisted_ids: diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index baf121d..54524e2 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -62,12 +62,24 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False): self.step_by_step = step_by_step self.old, self.new = self.create_clients(config_data) self.versions_match = self.check_versions() + self.check_projects() self.migrated = 0 self.cache = {} if self.debug: self.logger.debug("Debug Mode ON") + def check_projects(self): + """ Check if the two project actually do exist on the given servers. """ + try: + check_url = "{0.url}/create-project/{0.project}" + self.old.get(check_url.format(self.old)) + self.new.get(check_url.format(self.new)) + except ServiceError as e: + project_name = str(e.response.url).split("/")[-1] + self.logger.error("Project '%s' does not exist" % project_name) + exit(1) + def create_clients(self, config_data): """ Reads the given config defined by its path and creates the two teamscale clients from it. One old instance (migrating from) and a new onoe (migrating to). 
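check_projects turns a missing project into a fail-fast error before any data is touched. A reduced sketch of the probe; the create-project/<project> endpoint shape is taken from the patch, the rest is illustrative:

class ServiceError(Exception):
    def __init__(self, url):
        self.url = url

def check_projects(get, clients):
    for client in clients:
        url = "%s/create-project/%s" % (client["url"], client["project"])
        try:
            get(url)
        except ServiceError as e:
            print("Project '%s' does not exist" % str(e.url).split("/")[-1])
            raise SystemExit(1)

def fake_get(url):  # pretend both projects exist
    return {}

check_projects(fake_get, [{"url": "http://old", "project": "a"},
                          {"url": "http://new", "project": "b"}])
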
diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py
index a33a0bc..5e68586 100755
--- a/tools/migration/task_migrator.py
+++ b/tools/migration/task_migrator.py
@@ -26,6 +26,7 @@ def migrate(self):
             self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id))
             self.add_task(old_task)
             self.check_step()
+        self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks)))
 
     def adjust_task(self, task):

From 7395a894d55c793a08758ea8ed48554990262d6e Mon Sep 17 00:00:00 2001
From: pawelka
Date: Thu, 22 Feb 2018 14:40:15 +0100
Subject: [PATCH 45/79] Added a script which allows the batch migration of blacklists and tasks

---
 tools/migration/batch_config.template | 16 +++++++++
 tools/migration/batch_migrator.py     | 47 +++++++++++++++++++++++++++
 tools/migration/migrator_base.py      |  2 +-
 3 files changed, 64 insertions(+), 1 deletion(-)
 create mode 100644 tools/migration/batch_config.template
 create mode 100644 tools/migration/batch_migrator.py

diff --git a/tools/migration/batch_config.template b/tools/migration/batch_config.template
new file mode 100644
index 0000000..42fc146
--- /dev/null
+++ b/tools/migration/batch_config.template
@@ -0,0 +1,16 @@
+{
+    "old_instance" : {
+        "url": "http://localhost:8080",
+        "user": "user",
+        "token": "tokentoken"
+    },
+    "new_instance" : {
+        "url": "http://localhost:8080",
+        "user": "user",
+        "token": "tokentoken"
+    },
+    "project_mappings": [
+        {"from": "a", "to": "b"},
+        {"from": "c", "to": "d"}
+    ]
+}
\ No newline at end of file
diff --git a/tools/migration/batch_migrator.py b/tools/migration/batch_migrator.py
new file mode 100644
index 0000000..02acbe1
--- /dev/null
+++ b/tools/migration/batch_migrator.py
@@ -0,0 +1,47 @@
+import argparse
+import json
+
+from pathlib import Path
+from task_migrator import TaskMigrator
+from blacklist_migrator import BlacklistMigrator
+
+
+def main():
+    """ Migrates the blacklists and tasks of multiple projects from one server to the other.
+    It automatically reads the arguments from the command line. """
+    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
+    parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, "
+                                       "see batch_config.template.")
+    args = parser.parse_args()
+
+    config_file = Path(args.config)
+    if not config_file.exists():
+        print("Config file does not exist")
+
+    with config_file.open() as file:
+        data = json.load(file)
+        base_config = {x: data[x] for x in ["old_instance", "new_instance"]}
+
+    for mapping in data["project_mappings"]:
+        migrate(base_config, mapping)
+
+
+def migrate(base_config, mapping):
+    """ Migrates the blacklist and the tasks between the projects defined in the mapping.
+    The servers containing the projects are defined in the base config.
+    """
+    if not all(key in mapping for key in ("from", "to")):
+        print("Project mapping is malformed: %s" % mapping)
+        return None
+
+    base_config["old_instance"]["project"] = mapping["from"]
+    base_config["new_instance"]["project"] = mapping["to"]
+
+    print("Migrating from '{from}' to '{to}'".format(**mapping))
+    BlacklistMigrator(base_config).migrate()
+    TaskMigrator(base_config).migrate()
+    print("")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 54524e2..524ac18 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -10,7 +10,7 @@
 
 def get_arguments():
     """ Parses the arguments for the migration tool.
    """
-    parser = argparse.ArgumentParser(description="test", formatter_class=argparse.RawTextHelpFormatter)
+    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
     parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, "
                                        "see config.template.")
     parser.add_argument("--debug", action="store_true", help="The debug option which enables debug log.")

From c89a099c7c046c48ec86e5358721bfd0fa31103a Mon Sep 17 00:00:00 2001
From: Pawelka Timo - Munich-MR - external
Date: Thu, 22 Feb 2018 14:42:41 +0100
Subject: [PATCH 46/79] Changed output and made it clearer

---
 tools/migration/blacklist_migrator.py | 2 +-
 tools/migration/migrator_base.py      | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py
index f97fb1b..7280035 100755
--- a/tools/migration/blacklist_migrator.py
+++ b/tools/migration/blacklist_migrator.py
@@ -35,7 +35,7 @@ def migrate(self):
 
     def get_blacklist_infos(self):
         """ Returns all blacklist info objects from the old instance. """
         # Remove findings which have already been migrated and have the same id
-        blacklisted_ids = set(self.get_from_old("finding-blacklist")) - set(self.get_from_new("finding-blacklist"))
+        blacklisted_ids = set(self.get_from_old("finding-blacklist"))
 
         infos = []
         for finding_id in blacklisted_ids:
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 54524e2..c9de184 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -77,7 +77,7 @@ def check_projects(self):
             self.new.get(check_url.format(self.new))
         except ServiceError as e:
             project_name = str(e.response.url).split("/")[-1]
-            self.logger.error("Project '%s' does not exist" % project_name)
+            self.logger.exception("Project '%s' does not exist" % project_name)
             exit(1)
 
@@ -206,7 +206,7 @@ def get_matching_finding_id(self, finding_id):
         if finding is None:
             return None
 
-        new_findings = self.get_from_new("findings", path_suffix=finding["location"]["uniformPath"])
+        new_findings = self.get_from_new("findings", path_suffix=finding["location"]["uniformPath"], parameters={"blacklisted" : "all"})
         for new_finding in new_findings:
             if self.match_finding(new_finding, finding):
                 return new_finding["id"]

From 7efce701a9c1bd00290b9da925af57b2a319b685 Mon Sep 17 00:00:00 2001
From: pawelka
Date: Thu, 22 Feb 2018 14:55:06 +0100
Subject: [PATCH 47/79] Added logging

---
 tools/migration/batch_migrator.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/tools/migration/batch_migrator.py b/tools/migration/batch_migrator.py
index 02acbe1..c6ae31e 100644
--- a/tools/migration/batch_migrator.py
+++ b/tools/migration/batch_migrator.py
@@ -4,6 +4,7 @@
 from pathlib import Path
 from task_migrator import TaskMigrator
 from blacklist_migrator import BlacklistMigrator
+from migrator_base import create_logger
 
 
 def main():
@@ -13,34 +14,33 @@ def main():
     parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, "
Needs to be in a specific format, " "see batch_config.template.") args = parser.parse_args() - + logger = create_logger() config_file = Path(args.config) if not config_file.exists(): - print("Config file does not exist") + logger.error("Config file does not exist") with config_file.open() as file: data = json.load(file) base_config = {x: data[x] for x in ["old_instance", "new_instance"]} for mapping in data["project_mappings"]: - migrate(base_config, mapping) + migrate(base_config, mapping, logger) -def migrate(base_config, mapping): +def migrate(base_config, mapping, logger): """ Migrates the blacklist and the tasks of between the projects defined in the mapping. The servers containing the project are defined in the base config. """ if not all(key in mapping for key in ("from", "to")): - print("Project mapping is malformed: %s" % mapping) + logger.error("Project mapping is malformed: %s" % mapping) return None base_config["old_instance"]["project"] = mapping["from"] base_config["new_instance"]["project"] = mapping["to"] - print("Migrating from '{from}' to '{to}'".format(**mapping)) + logger.info("Migrating from '{from}' to '{to}'".format(**mapping)) BlacklistMigrator(base_config).migrate() TaskMigrator(base_config).migrate() - print("") if __name__ == "__main__": From 6871d063c5167424fb27fad0991e7cee17bb31cb Mon Sep 17 00:00:00 2001 From: pawelka Date: Thu, 22 Feb 2018 15:02:36 +0100 Subject: [PATCH 48/79] Fixed the double logging --- tools/migration/batch_migrator.py | 2 +- tools/migration/migrator_base.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/migration/batch_migrator.py b/tools/migration/batch_migrator.py index c6ae31e..c9f318f 100644 --- a/tools/migration/batch_migrator.py +++ b/tools/migration/batch_migrator.py @@ -14,7 +14,7 @@ def main(): parser.add_argument("config", help="The path to the config file. 
                                       "see batch_config.template.")
     args = parser.parse_args()
-    logger = create_logger()
+    logger = create_logger(name="batch")
     config_file = Path(args.config)
     if not config_file.exists():
         logger.error("Config file does not exist")
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 524ac18..0156f1d 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -37,9 +37,9 @@ def load_config_json(path):
         exit(1)
 
 
-def create_logger():
+def create_logger(name="migrator"):
     """ Creates a logger """
-    logger = logging.getLogger(__name__)
+    logger = logging.getLogger(name)
     logger.setLevel(logging.INFO)
     handler = logging.StreamHandler()
     formatter = logging.Formatter("%(levelname)-8s %(message)s")

From 56de09b5e90b5d89229f10a1d363836c0ea33a0a Mon Sep 17 00:00:00 2001
From: pawelka
Date: Tue, 6 Mar 2018 10:11:56 +0100
Subject: [PATCH 49/79] Changed according to review

---
 tools/migration/blacklist_migrator.py |  3 +--
 tools/migration/migrator_base.py      | 22 ++++++++++++----------
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/tools/migration/blacklist_migrator.py b/tools/migration/blacklist_migrator.py
index 7280035..18742c9 100755
--- a/tools/migration/blacklist_migrator.py
+++ b/tools/migration/blacklist_migrator.py
@@ -17,7 +17,7 @@ def migrate(self):
         blacklist_infos = self.get_blacklist_infos()
         if len(blacklist_infos) == 0:
             self.logger.info("No new blacklisted findings to migrate")
-            exit(1)
+            exit(0)
 
         self.logger.info("Migrating %s blacklisted findings" % len(blacklist_infos))
         for blacklist_info in blacklist_infos:
@@ -34,7 +34,6 @@ def migrate(self):
 
     def get_blacklist_infos(self):
         """ Returns all blacklist info objects from the old instance. """
-        # Remove findings which have already been migrated and have the same id
         blacklisted_ids = set(self.get_from_old("finding-blacklist"))
 
         infos = []
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 3a6ba3d..3414127 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -71,13 +71,15 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
 
     def check_projects(self):
         """ Check if the two projects actually do exist on the given servers. """
-        try:
-            check_url = "{0.url}/create-project/{0.project}"
-            self.old.get(check_url.format(self.old))
-            self.new.get(check_url.format(self.new))
-        except ServiceError as e:
-            project_name = str(e.response.url).split("/")[-1]
-            self.logger.exception("Project '%s' does not exist" % project_name)
+        self.check_project(self.old)
+        self.check_project(self.new)
+
+    def check_project(self, client):
+        """ Checks if the project specified in the client actually exists on that client. 
""" + check_url = "{0.url}/projects/{0.project}" + result = client.get(check_url.format(client)) + if result.content == b'null': + self.logger.error("Project '%s' does not exist" % client.project) exit(1) def create_clients(self, config_data): @@ -152,10 +154,10 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): response = client.get(url, parameters).json() except ServiceError as e: status_code = e.response.status_code - if status_code == 500: - self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) - elif status_code in (400, 404): + if status_code in (400, 404): raise + else: + self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) self.cache_request((url, parameters), response, use_cache) return response From 151424557e738a08b10470ceb69e716b9e9c279b Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 7 Mar 2018 10:08:12 +0100 Subject: [PATCH 50/79] Added path prefix transformation --- tests/task_migrator_test.py | 62 -------------------------------- tools/migration/config | 18 ++++++++++ tools/migration/migrator_base.py | 16 ++++++++- 3 files changed, 33 insertions(+), 63 deletions(-) delete mode 100644 tests/task_migrator_test.py create mode 100644 tools/migration/config diff --git a/tests/task_migrator_test.py b/tests/task_migrator_test.py deleted file mode 100644 index 1137fed..0000000 --- a/tests/task_migrator_test.py +++ /dev/null @@ -1,62 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - -import responses -from tools.migration.task_migrator import TaskMigrator -from test_utils import get_global_service_mock -from copy import deepcopy - -URL = "http://localhost:8080" -CONFIG = { - "old_instance": { - "url": URL, - "project": "old", - "user": "admin", - "token": "token" - }, - "new_instance": { - "url": URL, - "project": "new", - "user": "admin", - "token": "token" - } -} - - -class TestTaskMigrator: - """ Simple class for bundling the test for the task migration. """ - @staticmethod - def get_migrator(config): - """ - Returns a task migrator with the given config. - For an example config look at CONFIG - """ - TestTaskMigrator.create_necessary_client_responses(URL) - return TaskMigrator(config, False) - - @staticmethod - def create_necessary_client_responses(url, version=40000): - """ Creates responses which are necessary to create a client """ - responses.add(responses.GET, get_global_service_mock(url, "service-api-info"), - status=200, content_type="application/json", body='{ "apiVersion": 3 }') - responses.add(responses.GET, get_global_service_mock(url, "health-metrics"), - status=200, content_type="text/plain", body="version %s 0" % version) - - def get_default_migrator(self): - """ Returns the migrator with the default settings """ - return self.get_migrator(CONFIG) - - @responses.activate - def test_different_versions(self, caplog): - """ Tests the case where we want to migrate between two TS-instances with a different - version. A warning should be logged and the version_match flag should be False. 
-        """
-        config = deepcopy(CONFIG)
-        new_url = "http://localhost:8081"
-        config["new_instance"]["url"] = new_url
-        self.create_necessary_client_responses(new_url, version=30000)
-        migrator = self.get_migrator(config)
-
-        warning = list(filter(lambda x: x.levelname == "WARNING" and "version" in x.message, caplog.records))
-        assert len(warning) == 1, "Missing warning about version mismatch"
-        assert not migrator.versions_match, "Flag 'versions_match' should be False"
diff --git a/tools/migration/config b/tools/migration/config
new file mode 100644
index 0000000..c9fee95
--- /dev/null
+++ b/tools/migration/config
@@ -0,0 +1,18 @@
+{
+    "old_instance" : {
+        "url": "http://localhost:8080",
+        "project": "old",
+        "user": "admin",
+        "token": "jN3doIhUUHCb2cklIQGi2pywEYtwV0Fe",
+        "path_prefix_transformation" : {
+            "from" : "test/",
+            "to" : ""
+        }
+    },
+    "new_instance" : {
+        "url": "http://localhost:8080",
+        "project": "new",
+        "user": "admin",
+        "token": "jN3doIhUUHCb2cklIQGi2pywEYtwV0Fe"
+    }
+}
\ No newline at end of file
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 3414127..f47a67b 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -1,6 +1,7 @@
 import json
 import argparse
 import logging
+import re
 from abc import ABC, abstractmethod
 from pathlib import Path
 from teamscale_client import TeamscaleClient
@@ -61,6 +62,8 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
         self.dry_run = dry_run
         self.step_by_step = step_by_step
         self.old, self.new = self.create_clients(config_data)
+        self.set_prefix_transformations(self.old, config_data["old_instance"])
+        self.set_prefix_transformations(self.new, config_data["new_instance"])
         self.versions_match = self.check_versions()
         self.check_projects()
         self.migrated = 0
@@ -69,6 +72,16 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
         if self.debug:
             self.logger.debug("Debug Mode ON")
 
+    @staticmethod
+    def set_prefix_transformations(client, config_data):
+        """ Sets the path prefix transformations for the instances. """
+        key = "path_prefix_transformation"
+        if key in config_data and all(x in config_data[key] for x in ["from", "to"]):
+            client.path_transform_regex = re.compile("^" + config_data[key]["from"])
+            client.path_transform = lambda x: client.path_transform_regex.sub(config_data[key]["to"], x)
+        else:
+            client.path_transform = lambda x: x
+
     def check_projects(self):
         """ Check if the two projects actually do exist on the given servers. 
""" self.check_project(self.old) @@ -208,7 +221,8 @@ def get_matching_finding_id(self, finding_id): if finding is None: return None - new_findings = self.get_from_new("findings", path_suffix=finding["location"]["uniformPath"], parameters={"blacklisted" : "all"}) + location = self.new.path_transform(finding["location"]["uniformPath"]) + new_findings = self.get_from_new("findings", path_suffix=location, parameters={"blacklisted": "all"}) for new_finding in new_findings: if self.match_finding(new_finding, finding): return new_finding["id"] From 44f8a5da7fcb40bd814c12589b698866b537f1ce Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 7 Mar 2018 10:12:11 +0100 Subject: [PATCH 51/79] Deleted the wrong config file --- tools/migration/config | 18 ------------------ tools/migration/config.template | 6 +++++- 2 files changed, 5 insertions(+), 19 deletions(-) delete mode 100644 tools/migration/config diff --git a/tools/migration/config b/tools/migration/config deleted file mode 100644 index c9fee95..0000000 --- a/tools/migration/config +++ /dev/null @@ -1,18 +0,0 @@ -{ - "old_instance" : { - "url": "http://localhost:8080", - "project": "old", - "user": "admin", - "token": "jN3doIhUUHCb2cklIQGi2pywEYtwV0Fe", - "path_prefix_transformation" : { - "from" : "test/", - "to" : "" - } - }, - "new_instance" : { - "url": "http://localhost:8080", - "project": "new", - "user": "admin", - "token": "jN3doIhUUHCb2cklIQGi2pywEYtwV0Fe" - } -} \ No newline at end of file diff --git a/tools/migration/config.template b/tools/migration/config.template index bab0729..a2f2ccd 100644 --- a/tools/migration/config.template +++ b/tools/migration/config.template @@ -3,7 +3,11 @@ "url": "http://localhost:8080", "project": "a", "user": "user", - "token": "tokentoken" + "token": "tokentoken", + "path_prefix_transformation" : { + "from" : "test/", + "to" : "" + } }, "new_instance" : { "url": "http://localhost:8080", From 7c46c2cb95e2146282b1e6868d5e3c1e07c2bc29 Mon Sep 17 00:00:00 2001 From: pawelka Date: Wed, 7 Mar 2018 11:54:52 +0100 Subject: [PATCH 52/79] Changed findings comparison --- tools/migration/migrator_base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index f47a67b..09e4dde 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -248,7 +248,12 @@ def get_findings_url(self, findings_id, client=None): def match_finding(self, finding1, finding2): """ Checks if the given two findings are the same. This is done by comparing their location and message. 
            If the version of the two TS instances don't match, only the location is compared """
-        location_match = finding1["location"] == finding2["location"]
+        # Uniform path should not be checked as it could be different with a path transformation
+        f1_loc = finding1["location"]
+        f2_loc = finding2["location"]
+        entries = ["rawStartOffset", "rawEndOffset", "rawStartLine", "rawEndLine"]
+        location_match = all([(f1_loc[x] == f2_loc[x]) for x in entries])
+
         message_match = finding1["message"] == finding2["message"]
         return location_match and (message_match or not self.versions_match)

From 1410c86ddf160472b6b38458d514110b99efbb67 Mon Sep 17 00:00:00 2001
From: pawelka
Date: Wed, 7 Mar 2018 12:10:26 +0100
Subject: [PATCH 53/79] Path transformation fix

---
 tools/migration/config.template  | 12 ++++++------
 tools/migration/migrator_base.py | 14 ++++++--------
 2 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/tools/migration/config.template b/tools/migration/config.template
index a2f2ccd..5a93eea 100644
--- a/tools/migration/config.template
+++ b/tools/migration/config.template
@@ -3,16 +3,16 @@
         "url": "http://localhost:8080",
         "project": "a",
         "user": "user",
-        "token": "tokentoken",
-        "path_prefix_transformation" : {
-            "from" : "test/",
-            "to" : ""
-        }
+        "token": "tokentoken"
     },
     "new_instance" : {
         "url": "http://localhost:8080",
         "project": "b",
         "user": "user",
-        "token": "tokentoken"
+        "token": "tokentoken",
+        "path_prefix_transformation" : {
+            "from" : "test/",
+            "to" : ""
+        }
     }
 }
\ No newline at end of file
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 09e4dde..6c922dc 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -62,8 +62,7 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
         self.dry_run = dry_run
         self.step_by_step = step_by_step
         self.old, self.new = self.create_clients(config_data)
-        self.set_prefix_transformations(self.old, config_data["old_instance"])
-        self.set_prefix_transformations(self.new, config_data["new_instance"])
+        self.set_prefix_transformations(config_data)
         self.versions_match = self.check_versions()
         self.check_projects()
         self.migrated = 0
@@ -72,15 +71,14 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
         if self.debug:
             self.logger.debug("Debug Mode ON")
 
-    @staticmethod
-    def set_prefix_transformations(client, config_data):
+    def set_prefix_transformations(self, config_data):
         """ Sets the path prefix transformations for the instances. """
         key = "path_prefix_transformation"
         if key in config_data and all(x in config_data[key] for x in ["from", "to"]):
-            client.path_transform_regex = re.compile("^" + config_data[key]["from"])
-            client.path_transform = lambda x: client.path_transform_regex.sub(config_data[key]["to"], x)
+            regex = re.compile("^" + config_data[key]["from"])
+            self.path_transform = lambda x: regex.sub(config_data[key]["to"], x)
         else:
-            client.path_transform = lambda x: x
+            self.path_transform = lambda x: x
 
     def check_projects(self):
         """ Check if the two projects actually do exist on the given servers. """
         self.check_project(self.old)
@@ -221,7 +219,7 @@ def get_matching_finding_id(self, finding_id):
         if finding is None:
             return None
 
-        location = self.new.path_transform(finding["location"]["uniformPath"])
+        location = self.path_transform(finding["location"]["uniformPath"])
         new_findings = self.get_from_new("findings", path_suffix=location, parameters={"blacklisted": "all"})
         for new_finding in new_findings:
             if self.match_finding(new_finding, finding):
                 return new_finding["id"]

From e2039ee7828e0cd9e28e38dc480fd8da7f427fd9 Mon Sep 17 00:00:00 2001
From: pawelka
Date: Mon, 12 Mar 2018 16:08:36 +0100
Subject: [PATCH 54/79] Removed version check

---
 tools/migration/migrator_base.py | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 6c922dc..4834c25 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -63,7 +63,6 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False):
         self.step_by_step = step_by_step
         self.old, self.new = self.create_clients(config_data)
         self.set_prefix_transformations(config_data)
-        self.versions_match = self.check_versions()
         self.check_projects()
         self.migrated = 0
         self.cache = {}
@@ -112,18 +111,6 @@ def check_step(self):
         if self.step_by_step:
             input("click to continue...")
 
-    def check_versions(self):
-        """ Checks if the versions of both clients match. If not False will be returned
-        and a warning will be logged.
-        """
-        old_version = self.old.get_version()
-        new_version = self.new.get_version()
-        if old_version != new_version:
-            self.logger.warning("Teamscale versions of the old (%s) and new (%s) instance differ!" %
-                                (old_version, new_version))
-            return False
-        return True
-
     @staticmethod
     def get_client(data):
         """ Creates a teamscale client from the given data """
@@ -244,8 +231,7 @@ def get_findings_url(self, findings_id, client=None):
         return "{0.url}/findings.html#details/{0.project}/?id={1}".format(client, findings_id)
 
     def match_finding(self, finding1, finding2):
-        """ Checks if the given two findings are the same. This is done by comparing their location and message.
-            If the version of the two TS instances don't match, only the location is compared """
+        """ Checks if the given two findings are the same. This is done by comparing their location and message. """
         # Uniform path should not be checked as it could be different with a path transformation
         f1_loc = finding1["location"]
         f2_loc = finding2["location"]
         entries = ["rawStartOffset", "rawEndOffset", "rawStartLine", "rawEndLine"]
         location_match = all([(f1_loc[x] == f2_loc[x]) for x in entries])
 
         message_match = finding1["message"] == finding2["message"]
-        return location_match and (message_match or not self.versions_match)
+        return location_match or message_match

From 5eea5d8ce2ead77898de3e950ca9c79193eff04b Mon Sep 17 00:00:00 2001
From: pawelka
Date: Mon, 12 Mar 2018 16:19:37 +0100
Subject: [PATCH 55/79] Made findings comparison more robust

---
 tools/migration/migrator_base.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index 4834c25..d07d96c 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -235,10 +235,14 @@ def match_finding(self, finding1, finding2):
         # Uniform path should not be checked as it could be different with a path transformation
         f1_loc = finding1["location"]
         f2_loc = finding2["location"]
-        entries = ["rawStartOffset", "rawEndOffset", "rawStartLine", "rawEndLine"]
-        location_match = all([(f1_loc[x] == f2_loc[x]) for x in entries])
+        entries = ["rawStartLine", "rawEndLine"]
+
+        try:
+            location_match = all([(f1_loc[x] == f2_loc[x]) for x in entries])
+            message_match = finding1["message"] == finding2["message"]
+        except KeyError:
+            return False
 
-        message_match = finding1["message"] == finding2["message"]
         return location_match or message_match

From 738f3229ed859c6ca493f636f9849b4763ecd4f5 Mon Sep 17 00:00:00 2001
From: pawelka
Date: Thu, 22 Mar 2018 13:53:05 +0100
Subject: [PATCH 56/79] Added a description on how to use the migration tools

---
 tools/migration/README.md | 31 +++++++++++++++++++++++++++
 1 file changed, 31 insertions(+)
 create mode 100644 tools/migration/README.md

diff --git a/tools/migration/README.md b/tools/migration/README.md
new file mode 100644
index 0000000..b74f182
--- /dev/null
+++ b/tools/migration/README.md
@@ -0,0 +1,31 @@
+# Migration tools
+Collection of tools which can be used to migrate data between two Teamscale instances.
+Up to now there are three scripts you can use:
+- blacklist_migrator.py
+- task_migrator.py
+- batch_migrator.py
+
+The first two scripts are pretty self-explanatory.
+Use `config.template` to create your own configuration and give the path to the config file as a parameter
+when calling either migrator (task or blacklist).
+If there is any kind of `path (suffix) transformation` on either project, adjust the optional parameter
+`path_prefix_transformation`. This feature is not very sophisticated, so if you run into an extreme case where
+both projects have multiple transformations you might need to extend it.
+
+## Batch Migration
+Both scripts only work for one project at a time. If you want to migrate the blacklist and the tasks of
+a bunch of projects you can use the batch migrator script.
+For the configuration use `batch_config.template`.
+NOTE: The script assumes that all "from" projects are on the "old instance" and all "to" projects are on
+the new one.
+
+## Running the scripts
+As of now, if you want to run the scripts, you must not install the `teamscale-client` with `pip`, but add the
+root directory of the `teamscale-client-python` to the `PYTHONPATH`.
+```bash
+export PYTHONPATH="$PYTHONPATH:/home//workspace/teamscale-client-python"
+```
+or for Windows
+```bat
+set PYTHONPATH=%PYTHONPATH%;/home//workspace/teamscale-client-python
+```

From ee49bb1d74b1bb2ea4ae84d2323b6bdff348fcba Mon Sep 17 00:00:00 2001
From: pawelka
Date: Fri, 23 Mar 2018 13:01:05 +0100
Subject: [PATCH 57/79] tmp

---
 tools/migration/migrator_base.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index d07d96c..e961da5 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -245,6 +245,10 @@ def match_finding(self, finding1, finding2):
 
         return location_match or message_match
 
+    def match_location(self, finding1, finding2):
+        f1_loc = finding1["location"]
+        f2_loc = finding2["location"]
+
     @abstractmethod
     def migrate(self):
         """ Migrates the data from the old instance to the new one """

From 6ad3205cd472eff87f515319e83dbae9d83c5f2b Mon Sep 17 00:00:00 2001
From: pawelka
Date: Fri, 23 Mar 2018 14:21:32 +0100
Subject: [PATCH 58/79] Changed the findings comparison

---
 tools/__init__.py                |  0
 tools/migration/migrator_base.py | 30 ++++++++++++++++--------------
 2 files changed, 16 insertions(+), 14 deletions(-)
 delete mode 100644 tools/__init__.py

diff --git a/tools/__init__.py b/tools/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py
index e961da5..d172486 100644
--- a/tools/migration/migrator_base.py
+++ b/tools/migration/migrator_base.py
@@ -231,23 +231,25 @@ def get_findings_url(self, findings_id, client=None):
         return "{0.url}/findings.html#details/{0.project}/?id={1}".format(client, findings_id)
 
     def match_finding(self, finding1, finding2):
-        """ Checks if the given two findings are the same. This is done by comparing their location and message. """
-        # Uniform path should not be checked as it could be different with a path transformation
-        f1_loc = finding1["location"]
-        f2_loc = finding2["location"]
-        entries = ["rawStartLine", "rawEndLine"]
+        """ Checks if the given two findings are the same. """
+        location_match = self.dicts_match(finding1["location"],
+                                          finding2["location"],
+                                          ["location", "uniformPath", "@class"])
+        properties_match = self.dicts_match(finding1, finding2,
+                                            ["location", "id", "birth", "analysisTimestamp"])
 
-        try:
-            location_match = all([(f1_loc[x] == f2_loc[x]) for x in entries])
-            message_match = finding1["message"] == finding2["message"]
-        except KeyError:
-            return False
+        return location_match and properties_match
 
-        return location_match or message_match
+    @staticmethod
+    def dicts_match(dict1, dict2, excludes):
+        """ Checks if the given two dicts match.
+            Excludes is a list containing all keys which should not be compared
+        """
+        if dict1.keys() != dict2.keys():
+            return False
 
-    def match_location(self, finding1, finding2):
-        f1_loc = finding1["location"]
-        f2_loc = finding2["location"]
+        location_keys = [x for x in dict1.keys() if x not in excludes]
+        return all(dict1[x] == dict2[x] for x in location_keys)
 
     @abstractmethod
     def migrate(self):

From 88fc7cb243871c1fef1c3334fec2282c1c4c71c6 Mon Sep 17 00:00:00 2001
From: pawelka
Date: Fri, 23 Mar 2018 14:27:03 +0100
Subject: [PATCH 59/79] Reset client to master

---
 teamscale_client/client.py | 58 ++++++++++++++++++--------------------
 1 file changed, 27 insertions(+), 31 deletions(-)

diff --git a/teamscale_client/client.py b/teamscale_client/client.py
index 01f884a..4f1aa58 100644
--- a/teamscale_client/client.py
+++ b/teamscale_client/client.py
@@ -22,10 +22,8 @@ class TeamscaleClient:
         username (str): The username to use for authentication
         access_token (str): The IDE access token to use for authentication
         project (str): The id of the project on which to work
-        sslverify: See requests' verify parameter in
-            http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification
-        timeout (float): TTFB timeout in seconds,
-            see http://docs.python-requests.org/en/master/user/quickstart/#timeouts
+        sslverify: See requests' verify parameter in http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification
+        timeout (float): TTFB timeout in seconds, see http://docs.python-requests.org/en/master/user/quickstart/#timeouts
         branch: The branch name for which to upload/retrieve data
     """
 
@@ -47,16 +45,16 @@ def check_api_version(self):
         """
         url = self.get_global_service_url('service-api-info')
         response = self.get(url)
-        api_version = response.json()['apiVersion']
-        if api_version < 3:
+        apiVersion = response.json()['apiVersion']
+        if apiVersion < 3:
             raise ServiceError("Server api version " + str(
-                api_version) + " too low and not compatible. This client requires Teamscale 3.2 or newer.")
+                apiVersion) + " too low and not compatible. This client requires Teamscale 3.2 or newer.");
 
     def get(self, url, parameters=None):
         """Sends a GET request to the given service url. 
Args: - url (str): The URL for which to execute a GET request + url (str): The URL for which to execute a PUT request parameters (dict): parameters to attach to the url Returns: @@ -69,7 +67,7 @@ def get(self, url, parameters=None): response = requests.get(url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("GET", url, response) + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response def put(self, url, json=None, parameters=None, data=None): @@ -87,12 +85,12 @@ def put(self, url, json=None, parameters=None, data=None): Raises: ServiceError: If anything goes wrong """ - headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} + headers = {'Accept': 'application/json','Content-Type': 'application/json'} response = requests.put(url, params=parameters, json=json, data=data, headers=headers, auth=self.auth_header, verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("PUT", url, response) + raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response def delete(self, url, parameters=None): @@ -111,7 +109,7 @@ def delete(self, url, parameters=None): response = requests.delete(url, params=parameters, auth=self.auth_header, verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("PUT", url, response) + raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response def add_findings_group(self, name, mapping_pattern): @@ -220,6 +218,7 @@ def add_metric_descriptions(self, metric_descriptions): service_url = self.get_global_service_url("external-metric") return self.put(service_url, data=to_json(metric_descriptions)) + def upload_coverage_data(self, coverage_files, coverage_format, timestamp, message, partition): """Upload coverage reports to Teamscale. It is expected that the given coverage report files can be read from the filesystem. @@ -267,14 +266,14 @@ def upload_report(self, report_files, report_format, timestamp, message, partiti response = requests.post(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, files=multiple_files, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("POST", service_url, response) + raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return response def upload_architectures(self, architectures, timestamp, message): """Upload architectures to Teamscale. It is expected that the given architectures can be be read from the filesystem. Args: - architectures (dict): mapping of teamscale paths to architecture files that should be uploaded. Files must be readable. + architectures (dict): mappping of teamscale paths to architecture files that should be uploaded. Files must be readable. 
timestamp (datetime.datetime): timestamp for which to upload the data message (str): The message to use for the generated upload commit @@ -293,7 +292,7 @@ def upload_architectures(self, architectures, timestamp, message): response = requests.post(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, files=architecture_files, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("GET", service_url, response) + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return response def upload_non_code_metrics(self, metrics, timestamp, message, partition): @@ -330,7 +329,7 @@ def get_baselines(self): response = requests.get(service_url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, timeout=self.timeout) if response.status_code != 200: - raise ServiceError("GET", service_url, response) + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return [Baseline(x['name'], x['description'], timestamp=x['timestamp']) for x in response.json()] def delete_baseline(self, baseline_name): @@ -384,11 +383,6 @@ def get_projects(self): creation_timestamp=x['creationTimestamp'], alias=x.get('alias'), deleting=x['deleting'], reanalyzing=x['reanalyzing']) for x in response.json()] - def get_version(self): - """ Retrieves the teamscale version """ - response_text = self.get(self.get_global_service_url("health-metrics"), {"metric": "version"}).text - return response_text.split()[1] - def create_project(self, project_configuration): """Creates a project with the specified configuration in Teamscale. @@ -400,7 +394,7 @@ def create_project(self, project_configuration): Raises: ServiceError: If anything goes wrong. """ - return self._add_project(project_configuration, perform_update_call=False) + return self._add_project(project_configuration, perfrom_update_call=False) def update_project(self, project_configuration): """Updates an existing project in Teamscale with the given configuration. The id of the existing project is @@ -414,20 +408,20 @@ def update_project(self, project_configuration): Raises: ServiceError: If anything goes wrong. """ - return self._add_project(project_configuration, perform_update_call=True) + return self._add_project(project_configuration, perfrom_update_call=True) - def _add_project(self, project_configuration, perform_update_call): - """Adds a project to Teamscale. The parameter `perform_update_call` specifies, whether an update call should be + def _add_project(self, project_configuration, perfrom_update_call): + """Adds a project to Teamscale. The parameter `perfrom_update_call` specifies, whether an update call should be made: - - If `perform_update_call` is set to `True`, re-adding a project with an existing id will update the original + - If `perfrom_update_call` is set to `True`, re-adding a project with an existing id will update the original project. - - If `perform_update_call` is set to `False`, re-adding a project with an existing id will result in an error. - - Further, if `perform_update_call` is set to `True`, but no project with the specified id exists, an error is + - If `perfrom_update_call` is set to `False`, re-adding a project with an existing id will result in an error. + - Further, if `perfrom_update_call` is set to `True`, but no project with the specified id exists, an error is thrown as well. 
Args: project_configuration (data.ProjectConfiguration): The project that is to be created (or updated). - perform_update_call (bool): Whether to perform an update call. + perfrom_update_call (bool): Whether to perform an update call. Returns: requests.Response: object generated by the upload request. @@ -436,13 +430,15 @@ def _add_project(self, project_configuration, perform_update_call): """ service_url = self.get_global_service_url("create-project") parameters = { - "only-config-update": perform_update_call + "only-config-update": perfrom_update_call } response = self.put(service_url, parameters=parameters, data=to_json(project_configuration)) response_message = TeamscaleClient._get_response_message(response) if response_message != 'success': - raise ServiceError("GET", service_url, response) + raise ServiceError( + "ERROR: GET {url}: {status_code}:{message}".format(url=service_url, status_code=response.status_code, + message=response_message)) return response @staticmethod From 24c322408347f4b9a1366aeaf90a466311f1c292 Mon Sep 17 00:00:00 2001 From: pawelka Date: Fri, 23 Mar 2018 15:17:07 +0100 Subject: [PATCH 60/79] Small fixes --- teamscale_client/data.py | 20 ++++++-------------- tools/migration/migrator_base.py | 9 +++------ tools/migration/task_migrator.py | 2 +- 3 files changed, 10 insertions(+), 21 deletions(-) diff --git a/teamscale_client/data.py b/teamscale_client/data.py index e4efa00..0ec7958 100644 --- a/teamscale_client/data.py +++ b/teamscale_client/data.py @@ -5,7 +5,8 @@ import datetime import time -from teamscale_client.constants import Assessment, MetricAggregation, MetricValueType, MetricProperties, ConnectorType +from teamscale_client.constants import Assessment, MetricAggregation, MetricValueType, MetricProperties, \ + AssessmentMetricColors, ConnectorType from teamscale_client.utils import auto_str @@ -16,7 +17,7 @@ class Finding(object): Args: finding_type_id (str): The type id that this finding belongs to. message (str): The main finding message - assessment (constants.Assessment): The assessment this finding should have. Default is `YELLOW`. + assesssment (constants.Assessment): The assessment this finding should have. Default is `YELLOW`. This value is only important if in Teamscale the finding enablement is set to auto, otherwise the setting from Teamscale will be used. start_offset (int): Offset from the beginning of the file, where the finding area starts @@ -65,10 +66,8 @@ def __init__(self, findings, path, content=None): Args: typeid (str): The id used to reference the finding type. - description (str): The text to display that explains what this finding type is about - (and ideally how to fix it) This text will be the same for each concrete instance of the finding. - enablement (constants.Enablement): Describes the default enablement setting for this finding type, used when it - is added to the analysis profile. + description (str): The text to display that explains what this finding type is about (and ideally how to fix it). This text will be the same for each concrete instance of the finding. + enablement (constants.Enablement): Describes the default enablement setting for this finding type, used when it is added to the analysis profile. 
""" @@ -182,14 +181,7 @@ def _set_date(self, date_object): class ServiceError(Exception): """Teamscale service returned an error.""" - - def __init__(self, method, url, response): - self.message = "ERROR: {0} {1}: {r.status_code}:{r.text}".format(method, url, r=response) - self.response = response - - # This prevents the compressing of a response into a single line, making it unreadable (no repr()!) - def __str__(self): - return self.message + pass @auto_str diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index d172486..7d9eedd 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -151,11 +151,7 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): self.logger.debug("Service Call: {}".format((url, parameters))) response = client.get(url, parameters).json() except ServiceError as e: - status_code = e.response.status_code - if status_code in (400, 404): - raise - else: - self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) + self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) self.cache_request((url, parameters), response, use_cache) return response @@ -235,8 +231,9 @@ def match_finding(self, finding1, finding2): location_match = self.dicts_match(finding1["location"], finding2["location"], ["location", "uniformPath", "@class"]) + # Exclude category and message, because this might change with an update to a newer TS-version properties_match = self.dicts_match(finding1, finding2, - ["location", "id", "birth", "analysisTimestamp"]) + ["location", "id", "birth", "analysisTimestamp", "message", "categoryName"]) return location_match and properties_match diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 3caa64a..9400419 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -17,7 +17,7 @@ def migrate(self): old_tasks = self.get_from_old("tasks", parameters={"details": True}) if len(old_tasks) == 0: self.logger.info("No new tasks to migrate.") - exit(1) + exit(0) self.logger.info("Migrating %s tasks" % len(old_tasks)) for old_task in old_tasks: From 0db5463d34a3ca4542a4772e6bcd27238d1cb383 Mon Sep 17 00:00:00 2001 From: pawelka Date: Thu, 17 May 2018 16:03:08 +0200 Subject: [PATCH 61/79] Added upload of finding-groups and -descriptions --- tools/external-findings-upload/__init__.py | 0 .../examples/cs_findings_desc.json | 914 ++++++++++++++++++ .../examples/cs_group_desc.json | 4 + .../examples/cs_scs_upload.sh | 10 + .../examples/scs_findings_desc.json | 194 ++++ .../examples/scs_group_desc.json | 4 + .../examples/ts_config | 6 + .../upload_finding_descriptions.py | 46 + .../upload_group_description.py | 45 + 9 files changed, 1223 insertions(+) create mode 100644 tools/external-findings-upload/__init__.py create mode 100644 tools/external-findings-upload/examples/cs_findings_desc.json create mode 100644 tools/external-findings-upload/examples/cs_group_desc.json create mode 100755 tools/external-findings-upload/examples/cs_scs_upload.sh create mode 100644 tools/external-findings-upload/examples/scs_findings_desc.json create mode 100644 tools/external-findings-upload/examples/scs_group_desc.json create mode 100644 tools/external-findings-upload/examples/ts_config create mode 100755 tools/external-findings-upload/upload_finding_descriptions.py create mode 100755 tools/external-findings-upload/upload_group_description.py diff --git 
a/tools/external-findings-upload/__init__.py b/tools/external-findings-upload/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/external-findings-upload/examples/cs_findings_desc.json b/tools/external-findings-upload/examples/cs_findings_desc.json new file mode 100644 index 0000000..922850d --- /dev/null +++ b/tools/external-findings-upload/examples/cs_findings_desc.json @@ -0,0 +1,914 @@ +[ + { + "typeId": "CS0108", + "name": "'member1' hides inherited member 'member2'. Use the new keyword if hiding was intended.", + "description": "A variable was declared with the same name as a variable in a base class. However, the new keyword was not used. This warning informs you that you should use new; the variable is declared as if new had been used in the declaration.\nThe following sample generates CS0108:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1570", + "name": "XML comment on 'construct' has badly formed XML \u2014 'reason'", + "description": "When using /doc, any comments in the source code must be in XML. Any error with your XML markup will generate CS1570. For example:\nIf you are passing a string to a cref, such as in an tag, the string must be enclosed in double quotation marks. If you are using a tag, such as , which does not have a closing tag, you must specify a forward slash before the closing angle bracket. If you need to use a greater-than or less-than symbol in the text of description, you need to represent them with > or <. The file or path attribute on an tag was missing or improperly formed.\nThe following sample generates CS1570:", + "enablement": "YELLOW" + }, + { + "typeId": "CS2002", + "name": "Source file 'file' specified multiple times", + "description": "A source file name was passed to the compiler more than once. You can only specify a file once to the compiler to build an output file.\nThis warning cannot be suppressed by the /nowarn option.\nThe following sample generates CS2002:\nTo generate the error, compile the example with the command line:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1572", + "name": "XML comment on 'construct' has a param tag for 'parameter', but there is no parameter by that name", + "description": "When using the /doc compiler option, a comment was specified for a parameter that does not exist for the method. Change the value passed to the name attribute or remove one of the comment lines.\nThe following sample generates CS1572:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0109", + "name": "The member 'member' does not hide an inherited member. The new keyword is not required", + "description": "A class declaration included the new keyword even though the declaration does not override an existing declaration in a base class. You can delete the new keyword.\nThe following sample generates CS0109:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0414", + "name": "The private field 'field' is assigned but its value is never used", + "description": "This warning can occur in several scenarios in which the compiler can verify that a variable is never referenced:\nA private field is assigned a constant value but is never subsequently read. The unnecessary assignment could effect performance. Consider removing the field. A private or internal static field is assigned a constant value only in the initializer. Consider changing the field to a const. A private or internal field is assigned constant values and only used in blocks that are excluded by #ifdef directives. 
Consider putting the field inside the #ifdef block. A private or internal field is assigned constant values in multiple locations but not otherwise accessed. If you do not need the field, consider removing it. Otherwise, use it in some appropriate way.\nIn other situations, or where the suggested workaround is not acceptable, use #pragma 0414.\nThe following sample shows one way in which CS0414 will be generated:\nNote\u00a0\u00a0\u00a0If the variable i is declared as protected or public, no error will be generated because the compiler cannot know whether a derived class might use it or some other client code might instantiate the class and reference the variable", + "enablement": "YELLOW" + }, + { + "typeId": "CS1571", + "name": "XML comment on 'construct' has a duplicate param tag for 'parameter'", + "description": "When using the /doc compiler option, multiple comments were found for the same method parameter. Remove one of the duplicate lines.\nThe following sample generates CS1571:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1607", + "name": "Assembly generation -- reason", + "description": "A warning was generated from the assembly-creation phase of the compilation.\nIf you are building a 64-bit application on a 32-bit operating system, you must ensure that 64-bit versions of all referenced assemblies are installed on the target operating system.\nAll x86-specific common language runtime (CLR) assemblies have 64-bit counterparts (every CLR assembly will exist on all operating systems). Therefore, you can safely ignore CS1607 for CLR assemblies.\nYou can ignore this warning if you encounter it when you create a AssemblyInformationalVersionAttribute. The informational version is a string that attaches additional version information to an assembly; this information is not used at run time. Although you can specify any text, a warning message appears on compilation if the string is not in the format that is used by the assembly version number, or if it is in that format but contains wildcard characters. This warning is harmless.\nFor more information, see Al.exe Tool Errors and Warnings.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0183", + "name": "The given expression is always of the provided ('type') type", + "description": "If a conditional statement always evaluates to true, then you do not need a conditional statement. This warning occurs when you try to evaluate a type using the is operator. If the evaluation is a value type, then the check is unnecessary.\nThe following sample generates CS0183:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0184", + "name": "The given expression is never of the provided ('type') type", + "description": "The expression can never be true because the variable you are testing is neither declared as type nor derived from type.\nThe following sample generates CS0184:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0419", + "name": "Ambiguous reference in cref attribute: 'Method Name1'. Assuming 'Method Name2', but could have also matched other overloads including 'Method Name3'.", + "description": "In an XML documentation comment in the code, a reference could not be resolved. This could occur if the method is overloaded, or if two different identifiers with the same name are found. 
To resolve the warning, use a qualified name to disambiguate the reference, or include the specific overload in parentheses.\nThe following sample generates CS0419.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1957", + "name": "Member 'name' overrides 'method'. There are multiple override candidates at run-time. It is implementation dependent which method will be called.", + "description": "Method parameters that vary only by whether they are ref or out cannot be differentiated at run-time.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1956", + "name": "Member 'name' implements interface member 'name' in type 'type'. There are multiple matches for the interface member at run-time. It is implementation dependent which method will be called.", + "description": "This warning can be generated when two interface methods are differentiated only by whether a particular parameter is marked with ref or with out. It is best to change your code to avoid this warning because it is not obvious or guaranteed which method is called at runtime.\nAlthough C# distinguishes between out and ref, the CLR sees them as the same. When deciding which method implements the interface, the CLR just picks one.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0105", + "name": "The using directive for 'namespace' appeared previously in this namespace", + "description": "A namespace, which should only be declared once, was declared more than once; remove all duplicate namespace declarations.\nThe following sample generates CS0105:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0628", + "name": "'member' : new protected member declared in sealed class", + "description": "A sealed class cannot introduce a protected member because no other class will be able to inherit from the sealed class and use the protected member.\nThe following sample generates CS0628:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0626", + "name": "Method, operator, or accessor 'method' is marked external and has no attributes on it. Consider adding a DllImport attribute to specify the external implementation", + "description": "A method marked extern should also be marked with an attribute, for example, the DllImport attribute.\nThe attribute specifies where the method is implemented. At run time, the program will need this information.\nThe following sample generates CS0626:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1573", + "name": "Parameter 'parameter' has no matching param tag in the XML comment for 'parameter' (but other parameters do)", + "description": "When using the /doc compiler option, a comment was specified for some but not all parameters in a method. You may have forgotten to enter a comment for these parameters.\nThe following sample generates CS1573:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1574", + "name": "XML comment on 'construct' has syntactically incorrect cref attribute 'name'", + "description": "A string passed to a cref tag, for example, within an tag, referred to a member that is not available within the current build environment. The string that you pass to a cref tag must be the syntactically correct name of a member or field.\nFor more information, see Recommended Tags for Documentation Comments.\nThe following sample generates CS1574:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0824", + "name": "Constructor 'name' is marked external.", + "description": "A constructor may be marked as extern. However, the compiler cannot verify that the constructor actually exists. 
Therefore the warning is generated.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1616", + "name": "Option 'option' overrides attribute 'attribute' given in a source file or added module", + "description": "This warning occurs if the assembly attributes AssemblyKeyFileAttribute or AssemblyKeyNameAttribute found in source conflict with the /keyfile or /keycontainer command line option or key file name or key container specified in the Project Properties.\nFor the example below, assume you have a key file named cs1616.snk. This file could be generated with the command line:\nThe following sample generates CS1616:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0422", + "name": "The /incremental option is no longer supported", + "description": "Incremental compilation (/incr or /incremental) is not supported in Visual C# 2005.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0420", + "name": "'identifier': a reference to a volatile field will not be treated as volatile", + "description": "A volatile field should not normally be passed using a ref or out parameter, since it will not be treated as volatile within the scope of the function. There are exceptions to this, such as when calling an interlocked API. As with any warning, you may use the #pragma warning to disable this warning in those rare cases where you are intentionally using a volatile field as a reference parameter.\nThe following sample generates CS0420:", + "enablement": "YELLOW" + }, + { + "typeId": "CS0809", + "name": "Obsolete member 'memberA' overrides non-obsolete member 'memberB'.", + "description": "Typically, a member that is marked as obsolete should not override a member that is not marked as obsolete. This warning is generated in Visual Studio 2008 but not in Visual Studio 2005.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1058", + "name": "A previous catch clause already catches all exceptions. All exceptions thrown will be wrapped in a System.Runtime.CompilerServices.RuntimeWrappedException", + "description": "This attribute causes CS1058 if a catch() block has no specified exception type after a catch (System.Exception e) block. The warning advises that the catch() block will not catch any exceptions.\nA catch() block after a catch (System.Exception e) block can catch non-CLS exceptions if the RuntimeCompatibilityAttribute is set to false in the AssemblyInfo.cs file: [assembly: RuntimeCompatibilityAttribute(WrapNonExceptionThrows = false)]. If this attribute is not set explicitly to false, all thrown non-CLS exceptions are wrapped as Exceptions and the catch (System.Exception e) block catches them. For more information, see How to: Catch a non-CLS Exception.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0429", + "name": "Unreachable expression code detected", + "description": "This error occurs whenever part of an expression in your code is unreachable. In the following example, the condition false && myTest() meets this criteria because the myTest() method will never get evaluated due to the fact that the left side of the && operation is always false. 
As soon as the && operator evaluates the false statement as false, it stops the evaluation, and will never evaluate the right side.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1610", +    "name": "Unable to delete temporary file 'file' used for default Win32 resource -- resource", +    "description": "When using the /win32res compiler option and when your %TEMP% directory does not have DELETE permission, this warning indicates that the compiler could not delete a temporary file that it created.\nMake sure that you have read/write/delete permissions for the %TEMP% directory.\nIf necessary, you can manually delete these files and there will be no harm to C# or any of your programs.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0618", +    "name": "'member' is obsolete: 'text'", +    "description": "A class member was marked with the Obsolete attribute, such that a warning will be issued when the class member is referenced. For more information, see Common Attributes (C# and Visual Basic).\nThe following sample generates CS0618:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0612", +    "name": "'member' is obsolete", +    "description": "The class designer marked a member with the Obsolete attribute. This means that the member might not be supported in a future version of the class.\nThe following sample shows how accessing an obsolete member generates CS0612:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0028", +    "name": "'function declaration' has the wrong signature to be an entry point", +    "description": "The method declaration for Main was invalid: it was declared with an invalid signature. Main must be declared as static and it must return either int or void. For more information, see Main() and Command-Line Arguments (C# Programming Guide).\nThe following sample generates CS0028:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS2029", +    "name": "Invalid value for '/define'; 'identifier' is not a valid identifier", +    "description": "This warning occurs if the value that is used in the /define option has some invalid characters.\nThis warning cannot be suppressed by the /nowarn option.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0693", +    "name": "Type parameter 'type parameter' has the same name as the type parameter from outer type 'type'", +    "description": "This error occurs when you have a generic member such as a method inside a generic class. Since the method's type parameter is not necessarily the same as the class's type parameter, you cannot give them both the same name. For more information, see Generic Methods (C# Programming Guide).\nTo avoid this situation, use a different name for one of the type parameters.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0435", +    "name": "The namespace 'namespace' in 'assembly' conflicts with the imported type 'type' in 'assembly'. Using the namespace defined in 'assembly'.", +    "description": "This warning is issued when a namespace in a source file (file_2) conflicts with an imported type in file_1. The compiler uses the one in the source file.\nThe following example generates CS0435:\nCompile this file first:\nThen, compile this file:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0436", +    "name": "The type 'type' in 'assembly' conflicts with the imported type 'type2' in 'assembly'. Using the type defined in 'assembly'.", +    "description": "This warning is issued when a type in a source file (file_2) conflicts with an imported type in file_1.
The compiler uses the one in the source file.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0437", +    "name": "The type 'type' in 'assembly2' conflicts with the imported namespace 'namespace' in 'assembly1'. Using the type defined in 'assembly'.", +    "description": "This warning is issued when a type in a source file, file_2, conflicts with an imported namespace in file_1. The compiler uses the type in the source file.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0649", +    "name": "Field 'field' is never assigned to, and will always have its default value 'value'", +    "description": "The compiler detected an uninitialized private or internal field declaration that is never assigned a value.\nThe following sample generates CS0649:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS2023", +    "name": "Ignoring /noconfig option because it was specified in a response file", +    "description": "The /noconfig compiler option was specified in a response file, which is not allowed.\nThis warning cannot be suppressed by the /nowarn option.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1762", +    "name": "A reference was created to embedded interop assembly '' because of an indirect reference to that assembly from assembly ''. Consider changing the 'Embed Interop Types' property on either assembly.", +    "description": "You have added a reference to an assembly (assembly1) that has the Embed Interop Types property set to True. This instructs the compiler to embed interop type information from that assembly. However, the compiler cannot embed interop type information from that assembly because another assembly that you have referenced (assembly2) also references that assembly (assembly1) and has the Embed Interop Types property set to False.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0642", +    "name": "Possible mistaken empty statement", +    "description": "A semicolon after a conditional statement may cause your code to execute differently than intended.\nYou can use the /nowarn compiler option or #pragma warning to disable this warning; see /nowarn (Suppress Specified Warnings) (C# Compiler Options) or #pragma warning (C# Reference) for more information.\nThe following sample generates CS0642:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0440", +    "name": "Defining an alias named 'global' is ill-advised since 'global::' always references the global namespace and not an alias", +    "description": "This warning is issued when you define an alias named global.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0197", +    "name": "Passing 'argument' as ref or out or taking its address may cause a runtime exception because it is a field of a marshal-by-reference class", +    "description": "Any class that derives, directly or indirectly, from MarshalByRefObject is a marshal-by-reference class. Such a class can be marshaled by reference across process and machine boundaries. Thus, instances of this class could be proxies to remote objects. You cannot pass a field of a proxy object as ref or out.
So, you cannot pass fields of such a class as ref or out, unless the instance is this, which cannot be a proxy object.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0444", +    "name": "Predefined type 'type name 1' was not found in 'System namespace 1' but was found in 'System namespace 2'", +    "description": "A predefined object such as Int32 was not found where the compiler expected to find it, but instead found it in 'System namespace 2'.\nThe error could indicate that the .NET Framework is installed incorrectly. To fix this, reinstall the .NET Framework.\nIf you are writing your own base class libraries, you might also encounter this error. In this case, to resolve the error, rebuild mscorlib.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS2014", +    "name": "Compiler option 'old option' is obsolete, please use 'new option' instead", +    "description": "The form of the compiler option is deprecated. See C# Compiler Options for more information.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0219", +    "name": "The variable 'variable' is assigned but its value is never used", +    "description": "The compiler issues a level-three warning when you declare and assign a variable but do not use it.\nThe following sample generates CS0219:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0114", +    "name": "'function1' hides inherited member 'function2'. To make the current method override that implementation, add the override keyword. Otherwise add the new keyword.", +    "description": "A declaration in a class conflicts with a declaration in a base class such that the base class member will be hidden.\nFor more information, see base.\nThe following sample generates CS0114:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS4014", +    "name": "Because this call is not awaited, execution of the current method continues before the call is completed. Consider applying the 'await' operator to the result of the call.", +    "description": "The current method calls an async method that returns a Task or a Task<TResult> and doesn\u2019t apply the await operator to the result. The call to the async method starts an asynchronous task. However, because no await operator is applied, the program continues without waiting for the task to complete. In most cases, that behavior isn't what you expect. Usually other aspects of the calling method depend on the results of the call or, minimally, the called method is expected to complete before you return from the method that contains the call.\nAn equally important issue is what happens to exceptions that are raised in the called async method. An exception that's raised in a method that returns a Task or Task<TResult> is stored in the returned task. If you don't await the task or explicitly check for exceptions, the exception is lost. If you await the task, its exception is rethrown.\nAs a best practice, you should always await the call.\nYou should consider suppressing the warning only if you're sure that you don't want to wait for the asynchronous call to complete and that the called method won't raise any exceptions. In that case, you can suppress the warning by assigning the task result of the call to a variable.\nThe following example shows how to cause the warning, how to suppress it, and how to await the call.\nIn the example, if you choose Call #1 or Call #2, the unawaited async method (CalledMethodAsync) finishes after both its caller (CallingMethodAsync) and the caller's caller (startButton_Click) are complete.
The last line in the following output shows you when the called method finishes. Entry to and exit from the event handler that calls CallingMethodAsync in the full example are marked in the output.\nYou can also suppress compiler warnings by using #pragma warning (C# Reference) directives.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1030", +    "name": "#warning: 'text'", +    "description": "Displays the text of a warning defined with the #warning directive.\nThe following sample shows how to create a user-defined warning:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1203", +    "name": "The feature 'feature' is deprecated. Please use 'feature' instead.", +    "description": "The feature 'invalid feature' is deprecated. Please use 'valid feature' instead.\nThe feature you are attempting to use is now deprecated. Update your code to use the valid feature instead.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0728", +    "name": "Possibly incorrect assignment to local 'variable' which is the argument to a using or lock statement. The Dispose call or unlocking will happen on the original value of the local.", +    "description": "There are several scenarios where using or lock blocks will result in a temporary leak of resources. Here is one example:\nthisType f = null;\nusing (f)\n{\nf = new thisType();\n...\n}\nIn this case, the original value, such as null, of the variable f will be disposed of when the using block finishes executing, but the thisType object created inside the block will not be, although it will eventually get garbage collected.\nTo resolve this error, use the following form:\nusing (thisType f = new thisType())\n{\n...\n}\nIn this case, the newly allocated thisType object will be disposed of.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1645", +    "name": "Feature 'feature' is not part of the standardized ISO C# language specification, and may not be accepted by other compilers", +    "description": "The feature you are using is not part of the ISO standard. Code using this feature may not compile on other compilers.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3019", +    "name": "CLS compliance checking will not be performed on 'type' because it is not visible from outside this assembly.", +    "description": "This warning occurs when a type or a member that has the CLSCompliantAttribute attribute is not visible from another assembly. To resolve this error, remove the attribute on any classes or members that are not visible from the other assembly, or make the type or members visible. For more information on CLS Compliance, see Writing CLS-Compliant Code.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3018", +    "name": "'type' cannot be marked as CLS-Compliant because it is a member of non CLS-compliant type 'type'", +    "description": "This warning occurs if a nested class with the CLSCompliant attribute set to true is declared as a member of a class declared with the CLSCompliant attribute set to false. This is not allowed, since a nested class cannot be CLS-compliant if it is a member of an outer class that is not CLS-compliant. To resolve this warning, remove the CLSCompliant attribute from the nested class, or change it from true to false.
For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3015", +    "name": "'method signature' has no accessible constructors which use only CLS-compliant types", +    "description": "To be compliant with the Common Language Specification (CLS), the argument list of an attribute class cannot contain an array. For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3014", +    "name": "'member' does not need a CLSCompliant attribute because the assembly does not have a CLSCompliant attribute", +    "description": "In a source code file where compliance with the Common Language Specification (CLS) was not specified, a construct in the file was marked as being CLS compliant. This is not allowed. To resolve this warning, add an assembly level CLS compliant attribute to the file (in the following example, uncomment the line that contains the assembly level attribute). For more information about CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3017", +    "name": "You cannot specify the CLSCompliant attribute on a module that differs from the CLSCompliant attribute on the assembly (CS3017)", +    "description": "This warning occurs if you have an assembly CLSCompliant attribute that conflicts with a module CLSCompliant attribute. An assembly that is CLS compliant cannot contain modules that are not CLS compliant. To resolve this warning, make sure the assembly and module CLSCompliant attributes are either both true or both false, or remove one of the attributes. For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3016", +    "name": "Arrays as attribute arguments is not CLS-compliant", +    "description": "It is not compliant with the Common Language Specification (CLS) to pass an array to an attribute. For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3010", +    "name": "'member': CLS-compliant interfaces must have only CLS-compliant members", +    "description": "In an assembly marked with [assembly:CLSCompliant(true)], an interface contains a member marked with [CLSCompliant(false)]. Remove one of the Common Language Specification (CLS) compliance attributes. For more information about CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0282", +    "name": "There is no defined ordering between fields in multiple declarations of partial class or struct 'type'. To specify an ordering, all instance fields must be in the same declaration.", +    "description": "To resolve this error, put all member variables in a single partial class definition.\nA common way to get this error is by having a partial struct defined in more than one place, with some of the member variables in one definition, and some in another.\nThe following code generates CS0282.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3011", +    "name": "'member': only CLS-compliant members can be abstract", +    "description": "A class member cannot be both abstract and non-compliant with the Common Language Specification (CLS). The CLS specifies that all class members shall be implemented.
For more information about CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0665", +    "name": "Assignment in conditional expression is always constant; did you mean to use == instead of = ?", +    "description": "A conditional expression used the = operator and not the == operator.\nThe following sample generates CS0665:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1700", +    "name": "Assembly reference Assembly Name is invalid and cannot be resolved", +    "description": "This warning indicates that an attribute, such as InternalsVisibleToAttribute, was not specified correctly.\nFor more information, see Friend Assemblies (C# and Visual Basic).", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3012", +    "name": "You cannot specify the CLSCompliant attribute on a module that differs from the CLSCompliant attribute on the assembly (CS3012)", +    "description": "In order for a module to be compliant with the Common Language Specification (CLS) through [module:System.CLSCompliant(true)], it must be built with the /target:module compiler option. For more information on the CLS, see Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1701", +    "name": "Assuming assembly reference \"Assembly Name #1\" matches \"Assembly Name #2\", you may need to supply runtime policy (CS1701)", +    "description": "The two assemblies differ in release and/or version number. For unification to occur, you must specify directives in the application's .config file, and you must provide the correct strong name of an assembly, as demonstrated in the following example code.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3013", +    "name": "Added modules must be marked with the CLSCompliant attribute to match the assembly", +    "description": "A module that was compiled with the /target:module compiler option was added to a compilation with /addmodule. However, the module's compliance with the Common Language Specification (CLS) does not agree with the CLS state of the current compilation.\nCLS compliance is indicated with the module attribute. For example, [module:CLSCompliant(true)] indicates that the module is CLS compliant, and [module:CLSCompliant(false)] indicates that the module is not CLS compliant. The default is [module:CLSCompliant(false)]. For more information on the CLS, see Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1911", +    "name": "Access to member 'name' through a 'base' keyword from an anonymous method, lambda expression, query expression, or iterator results in unverifiable code. Consider moving the access into a helper method on the containing type.", +    "description": "Calling virtual functions with the base keyword inside the method body of an iterator or anonymous methods will result in unverifiable code. Unverifiable code will fail to run in a partial trust environment.\nOne resolution for CS1911 is to move the virtual function call to a helper function.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0661", +    "name": "'class' defines operator == or operator != but does not override Object.GetHashCode()", +    "description": "The compiler detected the user-defined equality or inequality operator, but no override for the GetHashCode function.
A user-defined equality or inequality operator implies that you also want to override the GetHashCode function.\nThe following sample generates CS0661:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0660", +    "name": "'class' defines operator == or operator != but does not override Object.Equals(object o)", +    "description": "The compiler detected the user-defined equality or inequality operator, but no override for the Equals function. A user-defined equality or inequality operator implies that you also want to override the Equals function. For more information, see Equality Comparisons (C# Programming Guide).\nThe following sample generates CS0660:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0280", +    "name": "'type' does not implement the 'pattern name' pattern. 'method name' has the wrong signature.", +    "description": "Two statements in C#, foreach and using, rely on predefined patterns, \"collection\" and \"resource\" respectively. This warning occurs when the compiler cannot match one of these statements to its pattern due to a method's incorrect signature. For example, the \"collection\" pattern requires that there be a method called MoveNext which takes no parameters and returns a boolean. Your code might contain a MoveNext method that has a parameter or perhaps returns an object.\nThe \"resource\" pattern and using provide another example. The \"resource\" pattern requires the Dispose method; if you define a property with the same name, you will get this warning.\nTo resolve this warning, ensure that the method signatures in your type match the signatures of the corresponding methods in the pattern, and ensure that you have no properties with the same name as a method required by the pattern.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1707", +    "name": "Delegate 'DelegateName' bound to 'MethodName1' instead of 'MethodName2' because of new language rules", +    "description": "C# 2.0 implements new rules for binding a delegate to a method. Additional information is considered that was not looked at in the past. This warning indicates that the delegate is now bound to a different overload of the method than it was previously bound to. You may wish to verify that the delegate really should be bound to 'MethodName1' instead of 'MethodName2'.\nFor a description of how the compiler determines which method to bind a delegate to, see Covariance and Contravariance in Delegates (C# Programming Guide).", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1709", +    "name": "Filename specified for preprocessor directive is empty", +    "description": "You have specified a preprocessor directive that includes a file name, but that file is empty. To resolve this warning, put the needed content into the file.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1702", +    "name": "Assuming assembly reference \"Assembly Name #1\" matches \"Assembly Name #2\", you may need to supply runtime policy (CS1702)", +    "description": "The two assembly references have differing build and/or revision numbers, so will not automatically unify.
You may need to supply run-time policy to force unification by using directives in the application .config file.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0067", +    "name": "The event 'event' is never used", +    "description": "An event was declared but never used in the class in which it was declared.\nThe following sample generates CS0067:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1668", +    "name": "Invalid search path 'path' specified in 'path string' -- 'system error message'", +    "description": "The path supplied to /lib at the command line was not valid, or a path in the LIB environment variable is invalid. Check the path used to verify that it exists and can be accessed. The error message in single quotation marks is the error returned from the operating system.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0472", +    "name": "The result of the expression is always 'value1' since a value of type 'value2' is never equal to 'null' of type 'value3'", +    "description": "The compiler should warn if you use an operator with a constant null value.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3009", +    "name": "'type': base type 'type' is not CLS-compliant", +    "description": "A base type was marked as not having to be compliant with the Common Language Specification (CLS) in an assembly that was marked as being CLS compliant. Either remove the attribute that specifies the assembly is CLS compliant or remove the attribute that indicates the type is not CLS compliant. For more information on CLS Compliance, see Writing CLS-Compliant Code and Language Independence and Language-Independent Components.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0279", +    "name": "'type name' does not implement the 'pattern name' pattern. 'method name' is either static or not public.", +    "description": "There are several statements in C# that rely on defined patterns, such as foreach and using. For example, foreach relies on the collection class implementing the enumerable pattern. This error occurs when the compiler is unable to make the match due to a method being declared static or not public. Methods in patterns are required to be instances of classes, and to be public.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3008", +    "name": "Identifier 'identifier' is not CLS-compliant (CS3008)", +    "description": "A public, protected, or protected internal identifier breaks compliance with the Common Language Specification (CLS) if it begins with an underscore character (_). For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1658", +    "name": "'warning text'. See also error 'error code'", +    "description": "The compiler emits this warning when it overrides an error with a warning. For information about the problem, refer to the error mentioned. To find the appropriate error from within the Visual Studio IDE, use the index. For example, if the text above reads \"See also error 'CS1037',\" look for CS1037 in the index.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3007", +    "name": "Overloaded method 'method' differing only by unnamed array types is not CLS-compliant", +    "description": "This error occurs if you have an overloaded method that takes a jagged array and the only difference between the method signatures is the element type of the array.
To avoid this error, consider using a rectangular array rather than a jagged array; use an additional parameter to disambiguate the function call; rename one or more of the overloaded methods; or, if CLS Compliance is not needed, remove the CLSCompliantAttribute attribute. For more information on CLS Compliance, see Language Independence and Language-Independent Components.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3006", +    "name": "Overloaded method 'method' differing only in ref or out, or in array rank, is not CLS-compliant", +    "description": "A method cannot be overloaded based on the ref or out parameter and still comply with the Common Language Specification (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0278", +    "name": "'type' does not implement the 'pattern name' pattern. 'method name' is ambiguous with 'method name'.", +    "description": "There are several statements in C# that rely on defined patterns, such as foreach and using. For example, foreach relies on the collection class implementing the \"enumerable\" pattern.\nCS0278 can occur if the compiler is unable to make the match due to ambiguities. For example, the \"enumerable\" pattern requires that there be a method called MoveNext, and your code might contain two methods called MoveNext. The compiler will attempt to find an interface to use, but it is recommended that you determine and resolve the cause of the ambiguity.\nFor more information, see How to: Access a Collection Class with foreach (C# Programming Guide).", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3005", +    "name": "Identifier 'identifier' differing only in case is not CLS-compliant (CS3005)", +    "description": "A public, protected, or protected internal identifier, which differs from another public, protected, or protected internal identifier only in the case of one or more letters, is not compliant with the Common Language Specification (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3004", +    "name": "Mixed and decomposed Unicode characters are not CLS-compliant", +    "description": "Only composed UNICODE characters are allowed in public, protected, or protected internal identifiers in order to be compliant with the Common Language Specification (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3003", +    "name": "Type of 'variable' is not CLS-compliant", +    "description": "A public, protected, or protected internal variable must be of a type that is compliant with the Common Language Specification (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code and Language Independence and Language-Independent Components.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1711", +    "name": "XML comment on 'type' has a typeparam tag for 'parameter', but there is no type parameter by that name", +    "description": "The documentation of a generic type includes a tag for the type parameter that has the wrong name.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS3001", +    "name": "Argument type 'type' is not CLS-compliant", +    "description": "A public, protected, or protected internal method must accept a parameter whose type is compliant with the Common Language Specification (CLS).
For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1712", + "name": "Type parameter 'type parameter' has no matching typeparam tag in the XML comment on 'type' (but other type parameters do)", + "description": "The documentation of a generic type is missing a typeparam tag. For more information, see (C# Programming Guide).", + "enablement": "YELLOW" + }, + { + "typeId": "CS3002", + "name": "Return type of 'method' is not CLS-compliant", + "description": "A public, protected, or protected internal method must return a value whose type is compliant with the Common Language Specification (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code and Common Language Specification.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1710", + "name": "XML comment on 'type' has a duplicate typeparam tag for 'parameter'", + "description": "The documentation of a generic type includes a duplicate tag for the type parameter.", + "enablement": "YELLOW" + }, + { + "typeId": "CS3000", + "name": "Methods with variable arguments are not CLS-compliant", + "description": "The arguments used in the method expose features that are not in the Common Language Specifications (CLS). For more information on CLS Compliance, see Writing CLS-Compliant Code.\nThe following example generates the warning CS3000.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0652", + "name": "Comparison to integral constant is useless; the constant is outside the range of type 'type'", + "description": "The compiler detected a comparison between a constant and a variable where the constant is out of the range of the variable.\nThe following sample generates CS0652:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1717", + "name": "Assignment made to same variable; did you mean to assign something else?", + "description": "This warning occurs when you assign a variable to itself, such as a = a.\nSeveral common mistakes can generate this warning:\nWriting a = a as the condition of an if statement, such as if (a = a). You probably meant to say if (a == a), which is always true, so you could write this more concisely as if (true). Mistyping. You probably meant to say a = b. In a constructor where the parameter has the same name as the field, not using the this keyword: you probably meant to say this.a = a.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1718", + "name": "Comparison made to same variable; did you mean to compare something else?", + "description": "If you meant to compare to something else, then you should simply correct the statement.\nBut another possibility is that you were testing for true or false, and were doing so by statements such as if (a == a) (true) or if (a < a) (false). It is better to simply say if (true) or if (false). There are two reasons for this:\nIt is simpler: it is always clearer to simply say what you mean. It helps avoid confusion: a new feature of C# 2.0 is nullable value types, which are analogous to the value null in Transact-SQL, the programming language used by SQL Server. Developers familiar with Transact-SQL might be concerned about the effect of nullable types on expressions such as if (a == a), because of the use of ternary logic in Transact-SQL. If you use true or false, you avoid this possible confusion.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0658", + "name": "'attribute modifier' is not a recognized attribute location. 
All attributes in this block will be ignored.", +    "description": "An invalid attribute modifier was specified. See Attribute Targets for more information.\nThe following sample generates CS0658:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0657", +    "name": "'attribute modifier' is not a valid attribute location for this declaration. Valid attribute locations for this declaration are 'locations'. All attributes in this block will be ignored.", +    "description": "The compiler found an attribute modifier in an invalid location. See Attribute Targets for more information.\nThe following sample generates CS0657:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0659", +    "name": "'class' overrides Object.Equals(object o) but does not override Object.GetHashCode()", +    "description": "The compiler detected an override of the Equals function but no override for GetHashCode. An override of Equals implies that you also want to override GetHashCode.\nFor more information, see\nHashtable. Guidelines for Implementing Equals and the Equality Operator (==) Implementing the Equals Method GetHashCode\nThe following sample generates CS0659:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0169", +    "name": "The private field 'class member' is never used", +    "description": "A private variable was declared but never referenced. A common way to generate this warning is when you declare a private member of a class and do not use it.\nThe following sample generates CS0169:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1698", +    "name": "Circular assembly reference 'AssemblyName1' does not match the output assembly name 'AssemblyName2'. Try adding a reference to 'AssemblyName1' or changing the output assembly name to match.", +    "description": "CS1698 occurs when an assembly reference is incorrect. This can happen if a referenced assembly is recompiled. To resolve, do not replace an assembly that itself is a dependency of an assembly you are referencing.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1699", +    "name": "Use command line option \"compiler_option\" or appropriate project settings instead of \"attribute_name\"", +    "description": "In order to sign an assembly, it is necessary to specify a key file. Prior to Microsoft Visual C# 2005, you specified the key file using CLR attributes in source code. These attributes are now deprecated.\nBeginning in Microsoft Visual C# 2005, you should use the Signing Page of the Project Designer or the Assembly Linker to specify the key file.\nThe Signing Page of the Project Designer is the preferred method; for more information, see Signing Page, Project Designer and Managing Assembly and Manifest Signing.\nThe How to: Sign an Assembly with a Strong Name topic uses the following compiler options:\n/keyfile (C# Compiler Options) instead of the AssemblyKeyFileAttribute attribute. /keycontainer (C# Compiler Options) instead of AssemblyKeyNameAttribute. /delaysign (C# Compiler Options) instead of AssemblyDelaySignAttribute.\nThese attributes have been deprecated for the following reasons:\nThere were security issues due to the attributes being embedded in the binary files produced by the compiler. Everyone who had your binary also had the keys stored in it. There were usability issues due to the fact that the path specified in the attributes was relative to the current working directory, which could change in the integrated development environment (IDE), or to the output directory. Thus, most times the key file is likely to be ..\\\\..\\\\mykey.snk.
Attributes also make it more difficult for the project system to properly sign satellite assemblies. When you use the compiler options instead of these attributes, you can use a fully qualified path and file name for the key without anything being embedded in the output file; the project system and source code control system can properly manipulate that full path when projects are moved around; the project system can maintain a project-relative path to the key file, and still pass a full path to the compiler; other build programs can more easily sign outputs by passing the proper path directly to the compiler instead of generating a source file with the correct attributes. Using attributes with friend assemblies can hamper compiler efficiency. When you use attributes, the compiler does not know what the key is when it has to decide whether or not to grant friendship and so it has to guess. At the end of compilation, the compiler is able to verify the guess once it finally knows the key. When the key file is specified with a compiler option, the compiler can immediately decide whether to grant friendship.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1694", +    "name": "Invalid filename specified for preprocessor directive. Filename is too long or not a valid filename.", +    "description": "This warning occurs when using the #pragma checksum preprocessor directive. The file name specified is longer than 256 characters. To resolve this warning, use a shorter file name.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1695", +    "name": "Invalid #pragma checksum syntax; should be #pragma checksum \"filename\" \"{XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX}\" \"XXXX...\"", +    "description": "You should rarely encounter this error since the checksum is generally inserted at run time if you are generating code by means of the Code Dom API.\nHowever, if you were to type in this #pragma statement and mistype either the GUID or checksum, you would get this error. The syntax checking by the compiler does not validate that you typed in a correct GUID, but it does check for the right number of digits and delimiters, and that the digits are hexadecimal. Likewise, it verifies that the checksum contains an even number of digits, and that the digits are hexadecimal.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0078", +    "name": "The 'l' suffix is easily confused with the digit '1' -- use 'L' for clarity", +    "description": "The compiler warns when it detects a cast to long using a lowercase l instead of an uppercase L.\nThe following sample generates CS0078:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0168", +    "name": "The variable 'var' is declared but never used", +    "description": "The compiler warns when a variable is declared but not used.\nThe following sample generates two CS0168 warnings:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1697", +    "name": "Different checksum values given for 'file name'", +    "description": "You have specified more than one checksum for a given file.
The debugger uses the checksum value to determine which file to debug when there is more than one file in a project with the same name. Most users will not encounter this error, but if you are writing an application that generates code, you may run into it. To resolve this error, ensure that you generate the checksum only once for any given code file.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0602", + "name": "The feature 'old_feature' is deprecated. Please use 'new_feature' instead", + "description": "A language feature used in your code (old_feature) is still supported, but that support may be removed in a future release. Instead, you should use the recommended syntax (new_feature).", + "enablement": "YELLOW" + }, + { + "typeId": "CS1590", + "name": "Invalid XML include element -- Missing file attribute", + "description": "A path or doc attribute, passed to the tag, was missing or incomplete.\nThe following sample generates CS1590:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1591", + "name": "Missing XML comment for publicly visible type or member 'Type_or_Member'", + "description": "The /doc compiler option was specified, but one or more constructs did not have comments.\nThe following sample generates CS1591:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1522", + "name": "Empty switch block", + "description": "The compiler detected a switch block with no case or default statement. A switch block must have one or more case or default statements.\nThe following sample generates CS1522:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1592", + "name": "Badly formed XML in included comments file -- 'reason'", + "description": "A problem, reported as reason, was found in the file specified by the tag.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0684", + "name": "'interface' interface marked with 'CoClassAttribute' not marked with 'ComImportAttribute'", + "description": "If you specify CoClassAttribute on an interface, you must also specify ComImportAttribute.\nThe following sample generates CS0684:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1598", + "name": "XML parser could not be loaded for the following reason: 'reason'. The XML documentation file 'file' will not be generated.", + "description": "The /doc option was specified, but the compiler could not find and load msxml3.dll. Make sure that the file msxml3.dll is installed and registered.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1060", + "name": "Use of possibly unassigned field 'name'. Struct instance variables are initially unassigned if struct is unassigned.", + "description": "Struct members are initialized to their default value if you do not explicitly initialize them. The default value for class types (and other reference types) is null. If the class is not initialized before any attempt to access it, a NullReferenceException will be thrown at runtime. The compiler cannot determine definitively whether the class member will be initialized or not, and so CS1060 is a warning and not an error.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0465", + "name": "Introducing a 'Finalize' method can interfere with destructor invocation. 
Did you intend to declare a destructor?", +    "description": "This warning occurs when you create a class with a method whose signature is public virtual void Finalize.\nIf such a class is used as a base class and if the deriving class defines a destructor, the destructor will override the base class Finalize method, not Finalize.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0464", +    "name": "Comparing with null of type 'type' always produces 'false'", +    "description": "This warning is produced when you perform a comparison between a nullable variable and null, and the comparison is not == or !=. To resolve this error, verify if you really want to check a value for null. A comparison like i == null can be either true or false. A comparison like i > null is always false.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0467", +    "name": "Ambiguity between method 'method' and non-method 'non-method'. Using method group.", +    "description": "Inherited members from different interfaces that have the same signature cause an ambiguity error.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1720", +    "name": "Expression will always cause a System.NullReferenceException because the default value of 'generic type' is null", +    "description": "If you write an expression involving the default of a generic type variable that is a reference type (for example, a class), this error will occur. Consider the following expression:\nSince T is a reference type, its default value is null, and so attempting to apply the ToString method to it will throw a NullReferenceException.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0688", +    "name": "'method1' has a link demand, but overrides or implements 'method2' which does not have a link demand. A security hole may exist.", +    "description": "The link demand set up on the derived class method can easily be circumvented by calling the base class method. To close the security hole, the base class method needs to also use the link demand. For more information, see Demand vs. LinkDemand.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0469", +    "name": "The 'goto case' value is not implicitly convertible to type 'type'", +    "description": "When you use goto case, there must be an implicit conversion from the value of the goto case to the type of the switch.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1723", +    "name": "XML comment on 'param' has cref attribute 'attribute' that refers to a type parameter", +    "description": "This error is generated by an XML comment that refers to a type parameter.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0162", +    "name": "Unreachable code detected", +    "description": "The compiler detected code that will never be executed.\nThe following sample generates CS0162:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1691", +    "name": "'number' is not a valid warning number", +    "description": "A number that was passed to the #pragma warning preprocessor directive was not a valid warning number. Verify that the number represents a warning, not an error or another sequence of characters.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1690", +    "name": "Accessing a member on 'member' may cause a runtime exception because it is a field of a marshal-by-reference class", +    "description": "This warning occurs when you try to call a method, property, or indexer on a member of a class that derives from MarshalByRefObject, and the member is a value type.
Objects that inherit from MarshalByRefObject are typically intended to be marshaled by reference across an application domain. If any code ever attempts to directly access the value-type member of such an object across an application domain, a runtime exception will occur. To resolve the warning, first copy the member into a local variable and call the method on that variable.\nThe following sample generates CS1690:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS0164", +    "name": "This label has not been referenced", +    "description": "A label was declared but never used.\nThe following sample generates CS0164:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1692", +    "name": "Invalid number", +    "description": "A number of preprocessor directives, such as #pragma and #line, use numbers as parameters. One of these numbers is invalid because it is too big, in the wrong format, contains illegal characters, and so on. To correct this error, correct the number.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1635", +    "name": "Cannot restore warning 'warning code' because it was disabled globally", +    "description": "This warning occurs if you use the /nowarn command line option or project setting to disable a warning for the entire compilation unit, but you use #pragma warning restore to attempt to restore that warning. To resolve this error, remove the /nowarn command line option or project setting, or remove the #pragma warning restore for any warnings you are disabling via the command line or project settings. For more information, see the #pragma warning topic.\nThe following sample generates CS1635:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1634", +    "name": "Expected disable or restore", +    "description": "This error occurs if a #pragma warning clause is badly formed, such as if disable or restore was omitted. For more information, see the #pragma warning topic.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1633", +    "name": "Unrecognized #pragma directive", +    "description": "The pragma used was not one of the known pragmas supported by the C# compiler. To resolve this error, use only supported pragmas.\nThe following sample generates CS1633:", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1687", +    "name": "Source file has exceeded the limit of 16,707,565 lines representable in the PDB, debug information will be incorrect", +    "description": "The PDB and debugger have some limitations about how big a file can be. If the source file is too big, the debugger will not behave properly beyond that limit. The user should either not emit debug information for that file by possibly using #line hidden, or they should find a way to shrink the file, possibly by splitting the file into multiple files. They might want to use the partial keyword to split up a large class.", +    "enablement": "YELLOW" +  }, +  { +    "typeId": "CS1685", +    "name": "The predefined type 'System.type name' is defined in multiple assemblies in the global alias; using definition from 'File Name'", +    "description": "This error occurs when a predefined system type such as System.Int32 is found in two assemblies. One way this can happen is if you are referencing mscorlib from two different places, such as trying to run the .NET Framework versions 1.0 and 1.1 side-by-side.\nThe compiler will use the definition from only one of the assemblies. The compiler searches only global aliases and does not search libraries defined with /reference.
If you have specified /nostdlib, the compiler will search for Object, and in the future start all searches for predefined types in the file where it found Object.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1683", + "name": "Reference to type 'Type Name' claims it is defined in this assembly, but it is not defined in source or any added modules", + "description": "This error can occur when you are importing an assembly that contains a reference back to the assembly you are currently compiling, but the assembly being compiled contains nothing matching the reference. One way to get to this situation is to compile your assembly, which initially does contain the member that the assembly being imported is referencing. Then you update your assembly, mistakenly removing the members that the imported assembly is referencing.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1684", + "name": "Reference to type 'Type Name' claims it is defined in 'Namespace', but it could not be found", + "description": "This error can be caused by a reference inside one namespace referring to a type that it says exists inside a second namespace, but the type does not exist. For example, mydll.dll says that type A exists inside yourdll.dll, but no such type exists inside yourdll.dll. One possible cause of this error is that the version of yourdll.dll you are using is too old and A has not yet been defined.\nThe following sample generates CS1684.", + "enablement": "YELLOW" + }, + { + "typeId": "CS3027", + "name": "'type_1' is not CLS-compliant because base interface 'type_2' is not CLS-compliant", + "description": "A non-CLS compliant type cannot be a base type for a type that is CLS compliant.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0253", + "name": "Possible unintended reference comparison; to get a value comparison, cast the right hand side to type 'type'", + "description": "The compiler is doing a reference comparison. If you want to compare the value of strings, cast the right side of the expression to type.\nThe following sample generates CS0253:", + "enablement": "YELLOW" + }, + { + "typeId": "CS3026", + "name": "CLS-compliant field 'field' cannot be volatile", + "description": "A volatile variable should not be CLS compliant.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0402", + "name": "'identifier' : an entry point cannot be generic or in a generic type", + "description": "The entry point was found in a generic type. To remove this warning, implement Main in a non-generic class or struct.", + "enablement": "YELLOW" + }, + { + "typeId": "CS5000", + "name": "Unknown compiler option '/option'", + "description": "An invalid compiler option was specified.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1580", + "name": "Invalid type for parameter 'parameter number' in XML comment cref attribute", + "description": "When attempting to reference an overload form of a method, the compiler detected a syntax error. Typically, this indicates that the parameter name, and not the type, was specified. 
A malformed line will appear in the generated XML file.\nThe following sample generates CS1580:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1581", + "name": "Invalid return type in XML comment cref attribute", + "description": "When attempting to reference a method, the compiler detected an error due to an invalid return type.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1587", + "name": "XML comment is not placed on a valid language element", + "description": "Recommended tags for documentation comments are not allowed on all language elements. For example, a tag is not allowed on a namespace. For more information on XML comments, see Recommended Tags for Documentation Comments (C# Programming Guide).", + "enablement": "YELLOW" + }, + { + "typeId": "CS0672", + "name": "Member 'member1' overrides obsolete member 'member2. Add the Obsolete attribute to 'member1'", + "description": "The compiler found an override to a method marked as obsolete. However, the overriding method was not itself marked as obsolete. The overriding method will still generate CS0612, if called.\nReview your method declarations and explicitly indicate whether a method (and all of its overrides) should be marked obsolete.\nThe following sample generates CS0672:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1584", + "name": "XML comment on 'member' has syntactically incorrect cref attribute 'invalid_syntax'", + "description": "One of the parameters passed to a tag for documentation comments has invalid syntax. For more information, see Recommended Tags for Documentation Comments (C# Programming Guide).", + "enablement": "YELLOW" + }, + { + "typeId": "CS0458", + "name": "The result of the expression is always 'null' of type 'type name'", + "description": "This warning is caused by a nullable expression that always results in null.\nThe following code generates warning CS0458.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0252", + "name": "Possible unintended reference comparison; to get a value comparison, cast the left hand side to type 'type'", + "description": "The compiler is doing a reference comparison. If you want to compare the value of strings, cast the left side of the expression to type.\nThe following sample generates CS0252:", + "enablement": "YELLOW" + }, + { + "typeId": "CS3023", + "name": "CLSCompliant attribute has no meaning when applied to return types. Try putting it on the method instead.", + "description": "Function return types are not checked for CLS Compliance, since the CLS Compliance rules apply to methods and type declarations.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0251", + "name": "Indexing an array with a negative index (array indices always start at zero)", + "description": "Do not use a negative number to index into an array.\nThe following sample generates CS0251:", + "enablement": "YELLOW" + }, + { + "typeId": "CS3024", + "name": "Constraint type 'type' is not CLS-compliant.", + "description": "The compiler issues this warning because the use of a non-CLS-compliant type as a generic type constraint could make it impossible for code written in some languages to consume your generic class.", + "enablement": "YELLOW" + }, + { + "typeId": "CS0675", + "name": "Bitwise-or operator used on a sign-extended operand; consider casting to a smaller unsigned type first", + "description": "The compiler implicitly widened and sign-extended a variable, and then used the resulting value in a bitwise OR operation. 
This can result in unexpected behavior.\nThe following sample generates CS0675:", + "enablement": "YELLOW" + }, + { + "typeId": "CS3021", + "name": "'type' does not need a CLSCompliant attribute because the assembly does not have a CLSCompliant attribute", + "description": "This warning occurs if [CLSCompliant(false)] appears on a class in an assembly which does not have an assembly-level CLSCompliant attribute set to true (i.e., the line [assembly: CLSCompliant(true)]). Since the assembly is not declaring itself CLS compliant, there is no need for anything within the assembly to declare itself non-compliant, since it is assumed to be non-compliant. For more information on CLS Compliance, see Writing CLS-Compliant Code.\nTo get rid of this warning, remove the attribute or add the assembly level attribute.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1589", + "name": "Unable to include XML fragment 'fragment' of file 'file' -- reason", + "description": "The syntax (fragment) of a tag, which referenced a file (file), was incorrect for the specified reason.\nA malformed line will be placed in the generated XML file.\nThe following sample generates CS1589:", + "enablement": "YELLOW" + }, + { + "typeId": "CS1927", + "name": "Ignoring /win32manifest for module because it only applies to assemblies.", + "description": "A win32 manifest is only applied at the assembly level. Your module will compile but it will not have a manifest.", + "enablement": "YELLOW" + }, + { + "typeId": "CS3022", + "name": "CLSCompliant attribute has no meaning when applied to parameters. Try putting it on the method instead.", + "description": "Method parameters are not checked for CLS Compliance, since the CLS Compliance rules apply to methods and type declarations.", + "enablement": "YELLOW" + }, + { + "typeId": "CS1682", + "name": "Reference to type 'type' claims it is nested within 'nested type', but it could not be found", + "description": "This error arises when you import references that do not agree with other references or with code you have written. 
A common way to get this error is to write code that refers to a class in metadata, and then you either delete that class or modify its definition.", + "enablement": "YELLOW" + } +] diff --git a/tools/external-findings-upload/examples/cs_group_desc.json b/tools/external-findings-upload/examples/cs_group_desc.json new file mode 100644 index 0000000..c1578f6 --- /dev/null +++ b/tools/external-findings-upload/examples/cs_group_desc.json @@ -0,0 +1,4 @@ +{ + "groupName": "C# Compiler Warnings", + "mapping": "^CS[0-9]{4}$" +} \ No newline at end of file diff --git a/tools/external-findings-upload/examples/cs_scs_upload.sh b/tools/external-findings-upload/examples/cs_scs_upload.sh new file mode 100755 index 0000000..5531be8 --- /dev/null +++ b/tools/external-findings-upload/examples/cs_scs_upload.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +export PYTHONPATH=$(cd ../../../; pwd) + +# upload the group descriptions +python3 ../upload_group_description.py ts_config cs_group_desc.json +python3 ../upload_group_description.py ts_config scs_group_desc.json + +# upload the findings descriptions +python3 ../upload_finding_descriptions.py ts_config cs_findings_desc.json +python3 ../upload_finding_descriptions.py ts_config scs_findings_desc.json diff --git a/tools/external-findings-upload/examples/scs_findings_desc.json b/tools/external-findings-upload/examples/scs_findings_desc.json new file mode 100644 index 0000000..59d4db8 --- /dev/null +++ b/tools/external-findings-upload/examples/scs_findings_desc.json @@ -0,0 +1,194 @@ +[ + { + "typeId": "SCS0001", + "name": "Command Injection", + "description": "The dynamic value passed to the command execution should be validated.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0002", + "name": "SQL Injection (LINQ)", + "description": "LINQ: How to Query for Information", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0003", + "name": "XPath Injection", + "description": "The dynamic value passed to the XPath query should be validated.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0004", + "name": "Certificate Validation Disabled", + "description": "Certificate Validation has been disabled. The communication could be intercepted.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0005", + "name": "Weak Random Number Generator", + "description": "The random numbers generated could be predicted.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0006", + "name": "Weak hashing function", + "description": "MD5 or SHA1 have known collision weaknesses and are no longer considered strong hashing algorithms.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0007", + "name": "XML eXternal Entity Injection (XXE)", + "description": "The XML parser is configured incorrectly. The operation could be vulnerable to XML eXternal Entity (XXE) processing.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0008", + "name": "Cookie Without SSL Flag", + "description": "It is recommended to specify the Secure flag to new cookie.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0009", + "name": "Cookie Without HttpOnly Flag", + "description": "It is recommended to specify the HttpOnly flag to new cookie.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0010", + "name": "Weak cipher algorithm", + "description": "DES and 3DES are not considered a strong cipher for modern applications. 
Currently, NIST recommends the usage of AES block ciphers instead.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0011",
+    "name": "Weak CBC Mode",
+    "description": "The CBC mode alone is susceptible to a padding oracle attack.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0012",
+    "name": "Weak ECB Mode",
+    "description": "ECB mode will produce the same result for identical blocks (i.e. 16 bytes for AES). An attacker may be able to guess the encrypted message. The use of AES in CBC mode with an HMAC is recommended, guaranteeing integrity and confidentiality.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0013",
+    "name": "Weak Cipher Mode",
+    "description": "The cipher text produced is susceptible to alteration by an adversary.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0014",
+    "name": "SQL Injection (WebControls)",
+    "description": "Unsafe usage of System.Web.UI.WebControls.SqlDataSource, System.Web.UI.WebControls.SqlDataSourceView or Microsoft.Whos.Framework.Data.SqlUtility.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0015",
+    "name": "Hardcoded Password",
+    "description": "The password configuration to this API appears to be hardcoded.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0016",
+    "name": "Cross-Site Request Forgery (CSRF)",
+    "description": "Anti-forgery token is missing.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0017",
+    "name": "Request Validation Disabled (Attribute)",
+    "description": "Request validation is disabled. Request validation allows the filtering of some XSS patterns submitted to the application.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0018",
+    "name": "Path Traversal",
+    "description": "A path traversal attack (also known as directory traversal) aims to access files and directories that are stored outside the expected directory. By manipulating variables that reference files with “dot-dot-slash (../)” sequences and their variations, or by using absolute file paths, it may be possible to access arbitrary files and directories stored on the file system, including application source code or configuration and critical system files.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0019",
+    "name": "OutputCache Conflict",
+    "description": "Caching conflicts with authorization.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0020",
+    "name": "SQL Injection (OLE DB)",
+    "description": "Use parametrized queries to mitigate SQL injection.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0021",
+    "name": "Request Validation Disabled (Configuration File)",
+    "description": "The validateRequest option, which provides additional protection against XSS, is disabled in the configuration file.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0022",
+    "name": "Event Validation Disabled",
+    "description": "The enableEventValidation option is disabled in the configuration file.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0023",
+    "name": "View State Not Encrypted",
+    "description": "The viewStateEncryptionMode option is not set to Always in the configuration file.",
+    "enablement": "YELLOW"
+  },
+  {
+    "typeId": "SCS0024",
+    "name": "View State MAC Disabled",
+    "description": "The enableViewStateMac option is disabled in the configuration file. 
(This feature cannot be disabled starting .NET 4.5.1)", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0025", + "name": "SQL Injection (ODBC)", + "description": "Use parametrized queries to mitigate SQL injection.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0026", + "name": "SQL Injection (MsSQL Data Provider)", + "description": "Use parametrized queries to mitigate SQL injection.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0027", + "name": "Open Redirect", + "description": "The dynamic value passed to the Redirect should be validated.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0028", + "name": "Insecure Deserialization", + "description": "Untrusted data passed for deserialization.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0029", + "name": "Cross-Site Scripting (XSS)", + "description": "A potential XSS was found. The endpoint returns a variable from the client input that has not been encoded. To protect against stored XSS attacks, make sure any dynamic content coming from user or data store cannot be used to inject JavaScript on a page. Most modern frameworks will escape dynamic content by default automatically (Razor for example) or by using special syntax (\u003c%: content %\u003e, \u003c%\u003d HttpUtility.HtmlEncode(content) %\u003e).", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0032", + "name": "Password RequiredLength Too Small", + "description": "The minimal length of a password is recommended to be set at least to 8.", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0033", + "name": "Password Complexity", + "description": "PasswordValidator should have at least two requirements for better security (RequiredLength, RequireDigit, RequireLowercase, RequireUppercase and/or RequireNonLetterOrDigit).", + "enablement": "YELLOW" + }, + { + "typeId": "SCS0034", + "name": "Password RequiredLength Not Set", + "description": "The RequiredLength property must be set with a minimum value of 8.", + "enablement": "YELLOW" + } +] \ No newline at end of file diff --git a/tools/external-findings-upload/examples/scs_group_desc.json b/tools/external-findings-upload/examples/scs_group_desc.json new file mode 100644 index 0000000..bda0656 --- /dev/null +++ b/tools/external-findings-upload/examples/scs_group_desc.json @@ -0,0 +1,4 @@ +{ + "groupName": "C# Security Checks", + "mapping": "^SCS[0-9]{4}$" +} \ No newline at end of file diff --git a/tools/external-findings-upload/examples/ts_config b/tools/external-findings-upload/examples/ts_config new file mode 100644 index 0000000..68707b7 --- /dev/null +++ b/tools/external-findings-upload/examples/ts_config @@ -0,0 +1,6 @@ +{ + "url": "http://localhost:8080", + "project": "owasp-webgoat.net", + "username": "admin", + "access_token": "gSFqSa6dK2AZiKHjxtlsU2Td6iPNVnGP" +} \ No newline at end of file diff --git a/tools/external-findings-upload/upload_finding_descriptions.py b/tools/external-findings-upload/upload_finding_descriptions.py new file mode 100755 index 0000000..a9e0606 --- /dev/null +++ b/tools/external-findings-upload/upload_finding_descriptions.py @@ -0,0 +1,46 @@ +import argparse +import json +import logging +import sys + +from teamscale_client import TeamscaleClient +from pathlib import Path + + +def main(): + instance, all_desc = get_config_data() + teamscale = TeamscaleClient(**instance) + + url = teamscale.get_global_service_url("external-findings-description") + for finding_desc in all_desc: + finding_id = finding_desc["typeId"] + teamscale.put(url + finding_id, finding_desc) + result_log = 
(len(all_desc), sys.argv[2]) + print("Successfully uploaded %s findings descriptions from `%s`" % result_log) + + +def get_config_data(): + """ Parses the arguments for the migration tool. """ + parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, " + "see config.template.") + parser.add_argument("descriptions", help="The findings descriptions") + args = parser.parse_args() + return load_json(args.config), load_json(args.descriptions) + + +def load_json(path): + """ Loads the config data as a JSON and returns it. """ + json_file = Path(path) + if json_file.exists(): + try: + return json.load(json_file.open()) + except json.JSONDecodeError: + logging.getLogger().exception("Config file '%s' is malformed" % path, exc_info=True) + else: + logging.getLogger().exception("Config file '%s' does not exist" % path) + exit(1) + + +if __name__ == "__main__": + main() diff --git a/tools/external-findings-upload/upload_group_description.py b/tools/external-findings-upload/upload_group_description.py new file mode 100755 index 0000000..8e2c545 --- /dev/null +++ b/tools/external-findings-upload/upload_group_description.py @@ -0,0 +1,45 @@ +import argparse +import json +import logging +import urllib.parse +import sys + +from teamscale_client import TeamscaleClient +from pathlib import Path + + +def main(): + instance, group_desc = get_config_data() + teamscale = TeamscaleClient(**instance) + + group_id = urllib.parse.quote(group_desc["groupName"]) + url = teamscale.get_global_service_url("external-findings-groups") + teamscale.put(url+group_id, group_desc) + print("Successfully uploaded group description from `%s`" % sys.argv[2]) + + +def get_config_data(): + """ Parses the arguments for the migration tool. """ + parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("config", help="The path to the config file. Needs to be in a specific format, " + "see config.template.") + parser.add_argument("descriptions", help="The findings descriptions") + args = parser.parse_args() + return load_json(args.config), load_json(args.descriptions) + + +def load_json(path): + """ Loads the config data as a JSON and returns it. """ + json_file = Path(path) + if json_file.exists(): + try: + return json.load(json_file.open()) + except json.JSONDecodeError: + logging.getLogger().exception("Config file '%s' is malformed" % path, exc_info=True) + else: + logging.getLogger().exception("Config file '%s' does not exist" % path) + exit(1) + + +if __name__ == "__main__": + main() \ No newline at end of file From c07c755f971db39d9b9eb9f584130dbf937b5b26 Mon Sep 17 00:00:00 2001 From: Roman Haas Date: Thu, 20 Dec 2018 17:10:10 +0100 Subject: [PATCH 62/79] Path prefix transformation is defined in the 'new_instance'. Additionally, fixed some linting errors (which resulted in unhandled exceptions during task migration) --- tools/migration/migrator_base.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 7d9eedd..654a26a 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -73,9 +73,9 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False): def set_prefix_transformations(self, config_data): """ Sets the path prefix transformations for the instances. 
""" key = "path_prefix_transformation" - if key in config_data and all(x in config_data[key] for x in ["from", "to"]): - regex = re.compile("^" + config_data[key]["from"]) - self.path_transform = lambda x: regex.sub(config_data[key]["to"], x) + if key in config_data['new_instance'] and all(x in config_data['new_instance'][key] for x in ["from", "to"]): + regex = re.compile("^" + config_data['new_instance'][key]["from"]) + self.path_transform = lambda x: regex.sub(config_data['new_instance'][key]["to"], x) else: self.path_transform = lambda x: x @@ -94,7 +94,7 @@ def check_project(self, client): def create_clients(self, config_data): """ Reads the given config defined by its path and creates the two teamscale clients from it. - One old instance (migrating from) and a new onoe (migrating to). + One old instance (migrating from) and a new one (migrating to). """ try: return self.get_client(config_data["old_instance"]), self.get_client(config_data["new_instance"]) @@ -151,7 +151,7 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): self.logger.debug("Service Call: {}".format((url, parameters))) response = client.get(url, parameters).json() except ServiceError as e: - self.logger.exception("Fetching data from %s failed (%s)" % (url, e.response.status_code)) + self.logger.exception("Fetching data from %s failed (%s)" % (url, e)) self.cache_request((url, parameters), response, use_cache) return response @@ -216,9 +216,8 @@ def get_finding_by_id(self, client, finding_id): try: return self.get(client, "findings-by-id", path_suffix=finding_id) except ServiceError as e: - if e.response.status_code == 400: - self.logger.debug("Finding with id %s not found. Skipping." % finding_id) - return None + self.logger.debug("Finding with id %s not found. Skipping." % finding_id) + return None def get_findings_url(self, findings_id, client=None): """ Creates a url link to the finding with the given id on the given Teamscale """ From b8e398803494c581f9001e00ec48e7969e0bdff6 Mon Sep 17 00:00:00 2001 From: Timo Pawelka Date: Thu, 7 Feb 2019 14:56:21 +0100 Subject: [PATCH 63/79] Adjust README --- tools/migration/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/migration/README.md b/tools/migration/README.md index b74f182..f0b64ed 100644 --- a/tools/migration/README.md +++ b/tools/migration/README.md @@ -8,9 +8,9 @@ Up to now there are three scripts you can use: The first two scripts are pretty self-explanatory. Use `config.template` to create your own configuration and give the path to the config file as a parameter when calling either migrator (task or blacklist). -1If there is any kind of `path (suffix) transformation` on either project adjust the optional parameter -`path_prefix_transformation`. This feature is not very in depth, so if you are coming up to extreme example where -both projects have multiple transformation you might need to extend it. +If there is any kind of `path (suffix) transformation` on necessary adjust the optional parameter +`path_prefix_transformation` for the new instance. This feature is not very in depth, so if you are coming up to +an extreme example where both projects have multiple transformation you might need to extend it. ## Batch Migration Both scripts only work for one project at a time. 
If you want to migrate the blacklist and the tasks of From 99824fb42a3d4db78e1ad99e226d065420f33771 Mon Sep 17 00:00:00 2001 From: Timo Pawelka Date: Mon, 18 Mar 2019 09:44:40 +0100 Subject: [PATCH 64/79] Simplified exception logging --- tools/external-findings-upload/__init__.py | 0 tools/migration/__init__.py | 0 tools/migration/migrator_base.py | 2 +- tools/migration/{ => templates}/batch_config.template | 0 tools/migration/{ => templates}/config.template | 0 5 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 tools/external-findings-upload/__init__.py delete mode 100644 tools/migration/__init__.py rename tools/migration/{ => templates}/batch_config.template (100%) rename tools/migration/{ => templates}/config.template (100%) diff --git a/tools/external-findings-upload/__init__.py b/tools/external-findings-upload/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tools/migration/__init__.py b/tools/migration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index 654a26a..db4dfd6 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -151,7 +151,7 @@ def get(self, client, service, path_suffix="", parameters=None, use_cache=True): self.logger.debug("Service Call: {}".format((url, parameters))) response = client.get(url, parameters).json() except ServiceError as e: - self.logger.exception("Fetching data from %s failed (%s)" % (url, e)) + self.logger.exception("Fetching data from %s failed" % url) self.cache_request((url, parameters), response, use_cache) return response diff --git a/tools/migration/batch_config.template b/tools/migration/templates/batch_config.template similarity index 100% rename from tools/migration/batch_config.template rename to tools/migration/templates/batch_config.template diff --git a/tools/migration/config.template b/tools/migration/templates/config.template similarity index 100% rename from tools/migration/config.template rename to tools/migration/templates/config.template From da3579a0b7f0ffe5187152e24fd576161368dd37 Mon Sep 17 00:00:00 2001 From: Dennis Pagano Date: Fri, 7 Jun 2019 19:26:51 +0200 Subject: [PATCH 65/79] Fixes --- teamscale_client/client.py | 8 ++++--- tools/migration/migrator_base.py | 34 ++++++++++++++++------------ tools/migration/task_migrator.py | 39 ++++++++++++++++++++++++++++---- 3 files changed, 60 insertions(+), 21 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 23aa130..d9b84fc 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -25,9 +25,10 @@ class TeamscaleClient: sslverify: See requests' verify parameter in http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification timeout (float): TTFB timeout in seconds, see http://docs.python-requests.org/en/master/user/quickstart/#timeouts branch (str): The branch name for which to upload/retrieve data + proxies (dict): Dictionary of proxies if any. Note: This needs pysocks installed. 
""" - def __init__(self, url, username, access_token, project, sslverify=True, timeout=30.0, branch=None): + def __init__(self, url, username, access_token, project, sslverify=True, timeout=30.0, branch=None, proxies=None): """Constructor """ self.url = url @@ -37,6 +38,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch + self.proxies = proxies self.check_api_version() @staticmethod @@ -85,7 +87,7 @@ def get(self, url, parameters=None): """ headers = {'Accept': 'application/json'} response = requests.get(url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, - timeout=self.timeout) + timeout=self.timeout, proxies=self.proxies) if response.status_code != 200: raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response @@ -108,7 +110,7 @@ def put(self, url, json=None, parameters=None, data=None): headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} response = requests.put(url, params=parameters, json=json, data=data, headers=headers, auth=self.auth_header, - verify=self.sslverify, timeout=self.timeout) + verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) if response.status_code != 200: raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response diff --git a/tools/migration/migrator_base.py b/tools/migration/migrator_base.py index db4dfd6..7ce3e10 100644 --- a/tools/migration/migrator_base.py +++ b/tools/migration/migrator_base.py @@ -64,6 +64,8 @@ def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False): self.old, self.new = self.create_clients(config_data) self.set_prefix_transformations(config_data) self.check_projects() + self.old_project = self.get_project(self.old).json() + self.new_project = self.get_project(self.new).json() self.migrated = 0 self.cache = {} @@ -86,12 +88,17 @@ def check_projects(self): def check_project(self, client): """ Checks if the project specified in the client actually exists on that client. """ - check_url = "{0.url}/projects/{0.project}" - result = client.get(check_url.format(client)) + result = self.get_project(client) if result.content == b'null': self.logger.error("Project '%s' does not exist" % client.project) exit(1) + @staticmethod + def get_project(client): + check_url = "{0.url}/projects/{0.project}" + result = client.get(check_url.format(client)) + return result + def create_clients(self, config_data): """ Reads the given config defined by its path and creates the two teamscale clients from it. One old instance (migrating from) and a new one (migrating to). 
@@ -114,7 +121,7 @@ def check_step(self): @staticmethod def get_client(data): """ Creates a teamscale client from the given data """ - return TeamscaleClient(data["url"], data["user"], data["token"], data["project"]) + return TeamscaleClient(data["url"], data["user"], data["token"], data["project"], proxies=data["proxies"]) def check_cache(self, request, use_cache): """ If use_cache is True it checks if the cache already contains the response @@ -188,12 +195,12 @@ def put_in_new(self, service, data, path_suffix="", parameters=None): path_suffix(str): Will be added to the end of the project service URL parameters(dict): Dict with parameters which should be appended to the URL """ + url = self.new.get_project_service_url(service) + path_suffix + self.logger.debug("Service Call: {}".format((url, parameters))) if not self.dry_run: - self.new.put(self.new.get_project_service_url(service) + path_suffix, - parameters=parameters, - json=data) + return self.new.put(url, parameters=parameters, json=data) - def get_matching_finding_id(self, finding_id): + def get_matching_finding_id(self, finding_id, timestamp=None): """ Tries to find a matching finding in the new instance for the given findings id of the old instance. If no match could be found `None` is returned. @@ -203,7 +210,7 @@ def get_matching_finding_id(self, finding_id): return None location = self.path_transform(finding["location"]["uniformPath"]) - new_findings = self.get_from_new("findings", path_suffix=location, parameters={"blacklisted": "all"}) + new_findings = self.get_from_new("findings", path_suffix=location, parameters={"blacklisted": "all", "t": timestamp}) for new_finding in new_findings: if self.match_finding(new_finding, finding): return new_finding["id"] @@ -229,10 +236,10 @@ def match_finding(self, finding1, finding2): """ Checks if the given two findings are the same. """ location_match = self.dicts_match(finding1["location"], finding2["location"], - ["location", "uniformPath", "@class"]) + ["location", "uniformPath", "@class", "rawEndOffset"]) # Exclude category and message, because this might change with an update to a newer TS-version properties_match = self.dicts_match(finding1, finding2, - ["location", "id", "birth", "analysisTimestamp", "message", "categoryName"]) + ["location", "id", "birth", "death", "analysisTimestamp", "message", "categoryName", "siblingLocations"]) return location_match and properties_match @@ -241,11 +248,10 @@ def dicts_match(dict1, dict2, excludes): """ Checks if the given two dicts matches. Excludes is a list containing all keys, which should not be compared """ - if dict1.keys() != dict2.keys(): - return False + common_keys_without_excludes = (dict1.keys() & dict2.keys()) - set(excludes) - location_keys = [x for x in dict1.keys() if x not in excludes] - return all([dict1[x] == dict2[x]] for x in location_keys) + matchings = [dict1[x] == dict2[x] for x in common_keys_without_excludes] + return all(matchings) @abstractmethod def migrate(self): diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index 9400419..b1cc135 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 -from migrator_base import MigratorBase, get_arguments +from teamscale_client.constants import TaskStatus + +from tools.migration.migrator_base import MigratorBase, get_arguments def main(): @@ -12,6 +14,12 @@ class TaskMigrator(MigratorBase): """ Class for migrating tasks between two instances. 
Tasks will only be migrated if all connected findings are on the new instance as well. """ + def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False, overwrite_tasks=True, findings_timestamp=None, overwrite_tasks_offset=0): + super().__init__(config_data, debug=debug, dry_run=dry_run, step_by_step=step_by_step) + self.overwrite_tasks = overwrite_tasks + self.findings_timestamp = findings_timestamp + self.overwrite_tasks_offset = overwrite_tasks_offset + def migrate(self): """ Migrates the tasks. """ old_tasks = self.get_from_old("tasks", parameters={"details": True}) @@ -22,9 +30,12 @@ def migrate(self): self.logger.info("Migrating %s tasks" % len(old_tasks)) for old_task in old_tasks: old_task_id = old_task["id"] + self.logger.debug('Working on task %i (%s)' % (old_task_id, old_task["status"])) self.adjust_task(old_task) + self.pre_process_task(old_task) self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) - self.add_task(old_task) + new_task_id = self.add_task(old_task) + self.post_process_task(old_task, old_task_id, new_task_id) self.check_step() self.logger.info("Migrated %d/%d tasks" % (self.migrated, len(old_tasks))) @@ -33,14 +44,28 @@ def adjust_task(self, task): """ Before adding the task to the new instance the ids of any connected findings need to be changed to the corresponding findings on the new instance. """ + self.logger.debug('Adjusting %i findings' % len(task["findings"])) for finding in task["findings"]: - matching_finding_id = self.get_matching_finding_id(finding["findingId"]) + self.logger.debug('Searching for finding %s' % finding["findingId"]) + + matching_finding_id = self.get_matching_finding_id(finding["findingId"], self.findings_timestamp) if matching_finding_id is None: self.logger.warn("The finding %s for task %s does not exists on the new instance." % ( self.get_findings_url(finding["findingId"]), task["id"])) else: + self.logger.debug("Found finding %s for task %s on new instance: %s." % ( + self.get_findings_url(finding["findingId"]), task["id"], + self.get_findings_url(matching_finding_id, client=self.new))) finding["findingId"] = matching_finding_id + def pre_process_task(self, task): + """Additional task preprocessing. Default implementation does nothing.""" + pass + + def post_process_task(self, task, old_task_id, new_task_id): + """Additional task postprocessing. Default implementation does nothing.""" + pass + def get_tasks_url(self, task_id, client=None): """ Creates a url of the old instance to the task with the given id. 
""" if client is None: @@ -50,7 +75,13 @@ def get_tasks_url(self, task_id, client=None): def add_task(self, task): """ Adds a task to the new instance """ self.migrated += 1 - self.put_in_new("tasks", path_suffix=str(task["id"]), data=task) + path_suffix = str(task["id"] + self.overwrite_tasks_offset) if self.overwrite_tasks else '0' + new_task_response = self.put_in_new("tasks", path_suffix=path_suffix, data=task) + new_task_id = new_task_response.json() if new_task_response else 100000 + if task["status"] != TaskStatus.OPEN: + # Need to put it a second time to get the status right + self.put_in_new("tasks", path_suffix=str(new_task_id), data=task) + return new_task_id if __name__ == "__main__": From c4af488f7a63c677dfae92a3ca22e41365dc3ecc Mon Sep 17 00:00:00 2001 From: Thomas Kinnen Date: Tue, 18 Jun 2019 14:24:31 +0200 Subject: [PATCH 66/79] Add socks proxy support --- teamscale_client/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index d9b84fc..d1464d3 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -42,7 +42,7 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.check_api_version() @staticmethod - def from_client_config(config, sslverify=True, timeout=30.0, branch=None): + def from_client_config(config, sslverify=True, timeout=30.0, branch=None, proxies=None): """Creates a new Teamscale client from a `TeamscaleClientConfig` object. Args: @@ -52,7 +52,7 @@ def from_client_config(config, sslverify=True, timeout=30.0, branch=None): branch (str): The branch name for which to upload/retrieve data """ return TeamscaleClient(config.url, config.username, config.access_token, config.project_id, - sslverify, timeout, branch) + sslverify, timeout, branch, proxies=proxies) def set_project(self, project): """Sets the project id for subsequent calls made using the client.""" From e45475aeeb4491a92db5b983da21a97847e8503b Mon Sep 17 00:00:00 2001 From: Dennis Pagano Date: Wed, 17 Jul 2019 11:38:32 +0200 Subject: [PATCH 67/79] Improvements --- teamscale_client/client.py | 3 ++- teamscale_client/data.py | 4 +++- tools/migration/task_migrator.py | 18 ++++++++++++++++-- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index d1464d3..ee5c066 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -623,7 +623,8 @@ def _finding_from_json(self, finding_json): end_offset=self._get_finding_location_entry(finding_json, 'rawEndOffset', 0), start_line=self._get_finding_location_entry(finding_json, 'rawStartLine', 1), end_line=self._get_finding_location_entry(finding_json, 'rawEndLine', 1), - uniform_path=finding_json['location']['uniformPath']) + uniform_path=finding_json['location']['uniformPath'], + resolved='death' in finding_json) def _get_finding_location_entry(self, finding_json, key, defaultValue): """Safely extracts a value from the location data of a JSON encoded finding. 
diff --git a/teamscale_client/data.py b/teamscale_client/data.py index 0aba0cf..75712f9 100644 --- a/teamscale_client/data.py +++ b/teamscale_client/data.py @@ -35,7 +35,8 @@ class Finding(object): """ def __init__(self, finding_type_id, message, assessment=Assessment.YELLOW, start_offset=None, end_offset=None, - start_line=None, end_line=None, identifier=None, uniform_path=None, finding_properties=None): + start_line=None, end_line=None, identifier=None, uniform_path=None, finding_properties=None, + resolved=False): self.findingTypeId = finding_type_id self.message = message self.assessment = assessment @@ -47,6 +48,7 @@ def __init__(self, finding_type_id, message, assessment=Assessment.YELLOW, start self.uniformPath = uniform_path self.findingProperties = finding_properties + self.resolved = resolved def __cmp__(self, other): """Compares this finding to another finding.""" diff --git a/tools/migration/task_migrator.py b/tools/migration/task_migrator.py index b1cc135..944e940 100755 --- a/tools/migration/task_migrator.py +++ b/tools/migration/task_migrator.py @@ -14,10 +14,12 @@ class TaskMigrator(MigratorBase): """ Class for migrating tasks between two instances. Tasks will only be migrated if all connected findings are on the new instance as well. """ - def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False, overwrite_tasks=True, findings_timestamp=None, overwrite_tasks_offset=0): + def __init__(self, config_data, debug=False, dry_run=False, step_by_step=False, overwrite_tasks=True, + findings_timestamp=None, get_findings_timestamp_from_task_creation=False, overwrite_tasks_offset=0): super().__init__(config_data, debug=debug, dry_run=dry_run, step_by_step=step_by_step) self.overwrite_tasks = overwrite_tasks self.findings_timestamp = findings_timestamp + self.get_findings_timestamp_from_task_creation = get_findings_timestamp_from_task_creation self.overwrite_tasks_offset = overwrite_tasks_offset def migrate(self): @@ -31,6 +33,9 @@ def migrate(self): for old_task in old_tasks: old_task_id = old_task["id"] self.logger.debug('Working on task %i (%s)' % (old_task_id, old_task["status"])) + if not self.filter_task(old_task): + self.logger.debug('Skipping task %i (%s)' % (old_task_id, old_task["status"])) + continue self.adjust_task(old_task) self.pre_process_task(old_task) self.logger.info("Migrating task %s" % self.get_tasks_url(old_task_id)) @@ -45,10 +50,15 @@ def adjust_task(self, task): to be changed to the corresponding findings on the new instance. """ self.logger.debug('Adjusting %i findings' % len(task["findings"])) + + timestamp_for_finding_on_new_instance = self.findings_timestamp + if self.get_findings_timestamp_from_task_creation: + timestamp_for_finding_on_new_instance = task["created"] + for finding in task["findings"]: self.logger.debug('Searching for finding %s' % finding["findingId"]) - matching_finding_id = self.get_matching_finding_id(finding["findingId"], self.findings_timestamp) + matching_finding_id = self.get_matching_finding_id(finding["findingId"], timestamp_for_finding_on_new_instance) if matching_finding_id is None: self.logger.warn("The finding %s for task %s does not exists on the new instance." % ( self.get_findings_url(finding["findingId"]), task["id"])) @@ -58,6 +68,10 @@ def adjust_task(self, task): self.get_findings_url(matching_finding_id, client=self.new))) finding["findingId"] = matching_finding_id + def filter_task(self, task): + """Additional task filter. 
Default implementation does nothing.""" + return True + def pre_process_task(self, task): """Additional task preprocessing. Default implementation does nothing.""" pass From 0dd04722d3aed8048e12781df7c6af25f33c2e7d Mon Sep 17 00:00:00 2001 From: Nils Kunze Date: Thu, 19 Dec 2019 14:50:48 +0100 Subject: [PATCH 68/79] manually generate the correct service url when fetching tasks --- teamscale_client/client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 6b2bfb5..608fd76 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -524,6 +524,9 @@ def get_global_service_url(self, service_name): """ return "%s/%s/" % (self.url, service_name) + def get_new_project_service_url(self, service_name): + return "{client.url}/api/projects/{client.project}/{service}/".format(client=self, service=service_name) + def get_project_service_url(self, service_name): """Returns the full url pointing to a project service. @@ -730,7 +733,7 @@ def get_tasks(self, status="OPEN", details=True, start=0, max=300): Raises: ServiceError: If anything goes wrong """ - service_url = self.get_project_service_url("tasks") + service_url = self.get_new_project_service_url("tasks") parameters = { "status": status, "details": details, From 6fe13a797e02d2e6a2df86feda33202bf94c1bfa Mon Sep 17 00:00:00 2001 From: Nils Kunze Date: Mon, 15 Jun 2020 18:07:43 +0200 Subject: [PATCH 69/79] workaround for posting task comments --- teamscale_client/client.py | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 9466750..2867049 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -115,6 +115,29 @@ def put(self, url, json=None, parameters=None, data=None): raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response + def post(self, url, json=None, parameters=None, data=None): + """Sends a PUT request to the given service url with the json payload as content. + + Args: + url (str): The URL for which to execute a PUT request + json: The Object to attach as content, will be serialized to json (only for object that can be serialized by default) + parameters (dict): parameters to attach to the url + data: The data object to be attached to the request + + Returns: + requests.Response: request's response + + Raises: + ServiceError: If anything goes wrong + """ + headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} + response = requests.post(url, params=parameters, json=json, data=data, + headers=headers, auth=self.auth_header, + verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) + if response.status_code != 200: + raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) + return response + def delete(self, url, parameters=None): """Sends a DELETE request to the given service url. 
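
The second hunk of this patch, below, switches `add_task_comment` over to this new `post` helper and to the new-style `tasks/<id>/comments` endpoint. A hedged sketch of the resulting call; the task id and comment text are invented values, and a reachable instance with valid credentials is assumed:

```python
from teamscale_client import TeamscaleClient

client = TeamscaleClient("http://localhost:8080", "admin", "<access-token>", "my-project")

# After this patch the comment is POSTed to
#   <url>/api/projects/<project>/tasks/42/comments
# instead of being PUT to the legacy comment-task service.
client.add_task_comment(42, "Re-checked after the migration; still valid.")
```
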
@@ -762,8 +785,8 @@ def add_task_comment(self, task_id, comment): Raises: ServiceError: If anything goes wrong """ - service_url = self.get_project_service_url("comment-task") + str(task_id) - response = self.put(service_url, data=to_json(comment)) + service_url = self.get_new_project_service_url("tasks") + str(task_id) + "/comments" + response = self.post(service_url, data=to_json(comment)) if response.status_code != 200: raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return response From d3389e84790414bd8f4d6de5a8a38f7fb43f9e41 Mon Sep 17 00:00:00 2001 From: Nils Kunze Date: Thu, 13 May 2021 20:36:35 -0700 Subject: [PATCH 70/79] add blacklisted parameter to get_findings to enable fetching false positives --- teamscale_client/client.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 2867049..76d8365 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -656,6 +656,7 @@ def _finding_from_json(self, finding_json): start_line=self._get_finding_location_entry(finding_json, 'rawStartLine', 1), end_line=self._get_finding_location_entry(finding_json, 'rawEndLine', 1), uniform_path=finding_json['location']['uniformPath'], + finding_properties=finding_json['properties'], finding_id=finding_json['id'], resolved='death' in finding_json) @@ -678,7 +679,7 @@ def _get_finding_location_entry(self, finding_json, key, defaultValue): return value - def get_findings(self, uniform_path, timestamp, recursive=True): + def get_findings(self, uniform_path, timestamp, recursive=True, blacklisted="excluded"): """Retrieves the list of findings in the currently active project for the given uniform path at the provided timestamp on the given branch. @@ -687,6 +688,8 @@ def get_findings(self, uniform_path, timestamp, recursive=True): timestamp (datetime.datetime): timestamp (unix format) for which to upload the data recursive (bool): Whether to query findings recursively, i.e. also get findings for files under the given path. + blacklisted (str): Whether to exclude or include blacklisted findings or focus on them entirely (set to + only_false_positives or only_tolerated for that) Returns: List[:class:`data.Finding`]): The list of findings. @@ -698,7 +701,8 @@ def get_findings(self, uniform_path, timestamp, recursive=True): parameters = { "t": self._get_timestamp_parameter(timestamp=timestamp), "recursive": recursive, - "all": True + "all": True, + "blacklisted": blacklisted } response = self.get(service_url, parameters=parameters) if response.status_code != 200: From 873a952e9a6a6891084257bbbb382ee5f0505524 Mon Sep 17 00:00:00 2001 From: Nils Kunze Date: Mon, 28 Feb 2022 16:30:28 -0800 Subject: [PATCH 71/79] add functionality for querying findings counts and descriptions --- teamscale_client/client.py | 50 ++++++++++++++++++++++++++++++++++---- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 76d8365..e31c644 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -116,10 +116,10 @@ def put(self, url, json=None, parameters=None, data=None): return response def post(self, url, json=None, parameters=None, data=None): - """Sends a PUT request to the given service url with the json payload as content. + """Sends a POST request to the given service url with the json payload as content. 
Args: - url (str): The URL for which to execute a PUT request + url (str): The URL for which to execute a POST request json: The Object to attach as content, will be serialized to json (only for object that can be serialized by default) parameters (dict): parameters to attach to the url data: The data object to be attached to the request @@ -132,10 +132,10 @@ def post(self, url, json=None, parameters=None, data=None): """ headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} response = requests.post(url, params=parameters, json=json, data=data, - headers=headers, auth=self.auth_header, - verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) + headers=headers, auth=self.auth_header, + verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) if response.status_code != 200: - raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) + raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response def delete(self, url, parameters=None): @@ -709,6 +709,46 @@ def get_findings(self, uniform_path, timestamp, recursive=True, blacklisted="exc raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return self._findings_from_json(response.json()) + def get_findings_summary(self, uniform_path, timestamp, recursive=True, blacklisted="excluded"): + service_url = self.get_new_project_service_url("findings/summary") + parameters = { + "t": self._get_timestamp_parameter(timestamp=timestamp), + "uniform-path": uniform_path, + "blacklisted": blacklisted, + "recursive": recursive, + "report-categories-without-findings": True + } + response = self.get(service_url, parameters=parameters) + if response.status_code != 200: + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + return response.json() + + def get_findings_with_count(self, uniform_path, timestamp, recursive=True, blacklisted="excluded", filter=""): + service_url = self.get_new_project_service_url("findings") + "list/with-count" + parameters = { + "t": self._get_timestamp_parameter(timestamp=timestamp), + "uniform-path": uniform_path, + "blacklisted": blacklisted, + "filter": filter, + "recursive": recursive, + "all": True, + } + response = self.get(service_url, parameters=parameters) + if response.status_code != 200: + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + return response.json() + + def get_findings_descriptions(self, language): + service_url = self.get_global_service_url("api/language-rules") + f"{self.project}" + parameters = { + "language": language + } + response = self.get(service_url, parameters=parameters) + if response.status_code != 200: + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + print(response.url) + return response.json() + def get_finding_by_id(self, finding_id, branch=None, timestamp=None): """Retrieves the finding with the given id. 
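
The two query helpers added in the patch above return the service's raw JSON rather than objects from `data.py`. A usage sketch with placeholder connection data; the language key "CS" is an assumption, and the exact response shape depends on the Teamscale version:

```python
import datetime

from teamscale_client import TeamscaleClient

client = TeamscaleClient("http://localhost:8080", "admin", "<access-token>", "my-project")

# Per-category finding counts for a subtree, including categories without findings.
summary = client.get_findings_summary("src/main/", datetime.datetime.now())
print(summary)

# Rule descriptions for one analysis language; "CS" is an assumed language key.
descriptions = client.get_findings_descriptions("CS")
print(descriptions)
```
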
From 73b2d87f026bd362967cd904e1a89349ad0adf4a Mon Sep 17 00:00:00 2001 From: Nils Kunze Date: Mon, 28 Feb 2022 16:46:51 -0800 Subject: [PATCH 72/79] remove unnecessary proxy parameter --- teamscale_client/client.py | 47 +++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index e31c644..573448a 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -25,10 +25,9 @@ class TeamscaleClient: sslverify: See requests' verify parameter in http://docs.python-requests.org/en/latest/user/advanced/#ssl-cert-verification timeout (float): TTFB timeout in seconds, see http://docs.python-requests.org/en/master/user/quickstart/#timeouts branch (str): The branch name for which to upload/retrieve data - proxies (dict): Dictionary of proxies if any. Note: This needs pysocks installed. """ - def __init__(self, url, username, access_token, project, sslverify=True, timeout=30.0, branch=None, proxies=None): + def __init__(self, url, username, access_token, project, sslverify=True, timeout=30.0, branch=None): """Constructor """ self.url = url @@ -38,11 +37,10 @@ def __init__(self, url, username, access_token, project, sslverify=True, timeout self.sslverify = sslverify self.timeout = timeout self.branch = branch - self.proxies = proxies self.check_api_version() @staticmethod - def from_client_config(config, sslverify=True, timeout=30.0, branch=None, proxies=None): + def from_client_config(config, sslverify=True, timeout=30.0, branch=None): """Creates a new Teamscale client from a `TeamscaleClientConfig` object. Args: @@ -52,7 +50,7 @@ def from_client_config(config, sslverify=True, timeout=30.0, branch=None, proxie branch (str): The branch name for which to upload/retrieve data """ return TeamscaleClient(config.url, config.username, config.access_token, config.project_id, - sslverify, timeout, branch, proxies=proxies) + sslverify, timeout, branch) def set_project(self, project): """Sets the project id for subsequent calls made using the client.""" @@ -87,7 +85,7 @@ def get(self, url, parameters=None): """ headers = {'Accept': 'application/json'} response = requests.get(url, params=parameters, auth=self.auth_header, verify=self.sslverify, headers=headers, - timeout=self.timeout, proxies=self.proxies) + timeout=self.timeout) if response.status_code != 200: raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response @@ -110,7 +108,7 @@ def put(self, url, json=None, parameters=None, data=None): headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} response = requests.put(url, params=parameters, json=json, data=data, headers=headers, auth=self.auth_header, - verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) + verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: raise ServiceError("ERROR: PUT {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response @@ -133,7 +131,7 @@ def post(self, url, json=None, parameters=None, data=None): headers = {'Accept': 'application/json', 'Content-Type': 'application/json'} response = requests.post(url, params=parameters, json=json, data=data, headers=headers, auth=self.auth_header, - verify=self.sslverify, timeout=self.timeout, proxies=self.proxies) + verify=self.sslverify, timeout=self.timeout) if response.status_code != 200: raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response @@ 
-710,6 +708,22 @@ def get_findings(self, uniform_path, timestamp, recursive=True, blacklisted="exc return self._findings_from_json(response.json()) def get_findings_summary(self, uniform_path, timestamp, recursive=True, blacklisted="excluded"): + """Gets the list of finding summaries for an element or a resource sub-tree. + + Args: + uniform_path (str): The uniform path to get findings for. + timestamp (datetime.datetime): timestamp (unix format) for which to upload the data + recursive (bool): Whether to query findings recursively, i.e. also get findings for files under the given + path. + blacklisted (str): Whether to exclude or include blacklisted findings or focus on them entirely (set to + only_false_positives or only_tolerated for that) + + Returns: + Json encoded response + + Raises: + ServiceError: If anything goes wrong + """ service_url = self.get_new_project_service_url("findings/summary") parameters = { "t": self._get_timestamp_parameter(timestamp=timestamp), @@ -723,21 +737,6 @@ def get_findings_summary(self, uniform_path, timestamp, recursive=True, blacklis raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return response.json() - def get_findings_with_count(self, uniform_path, timestamp, recursive=True, blacklisted="excluded", filter=""): - service_url = self.get_new_project_service_url("findings") + "list/with-count" - parameters = { - "t": self._get_timestamp_parameter(timestamp=timestamp), - "uniform-path": uniform_path, - "blacklisted": blacklisted, - "filter": filter, - "recursive": recursive, - "all": True, - } - response = self.get(service_url, parameters=parameters) - if response.status_code != 200: - raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) - return response.json() - def get_findings_descriptions(self, language): service_url = self.get_global_service_url("api/language-rules") + f"{self.project}" parameters = { @@ -785,7 +784,7 @@ def get_finding_url(self, finding): """ if not finding.finding_id: return None - return "{client.url}/findings.html#details/{client.project}/?id={finding_id}"\ + return "{client.url}/findings.html#details/{client.project}/?id={finding_id}" \ .format(client=self, finding_id=finding.finding_id) def get_tasks(self, status="OPEN", details=True, start=0, max=300): From bcf4f372c2e2e5ce290e56af1f20c0a933c730df Mon Sep 17 00:00:00 2001 From: Bruno da Silva Date: Thu, 10 Aug 2023 16:22:02 -0700 Subject: [PATCH 73/79] Support code for ongoing work on temp findings script --- teamscale_client/client.py | 62 +++++++++++++ teamscale_client/merge_request.py | 144 ++++++++++++++++++++++++++++++ 2 files changed, 206 insertions(+) create mode 100644 teamscale_client/merge_request.py diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 46d4e47..58819c1 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -10,6 +10,7 @@ from requests.auth import HTTPBasicAuth from teamscale_client.data import ServiceError, Baseline, ProjectInfo, Finding, Task +from teamscale_client.merge_request import MergeRequest, FindingsChurnCount from teamscale_client.utils import to_json @@ -138,6 +139,28 @@ def post(self, url, json=None, parameters=None, data=None): raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response + def post(self, url, parameters=None, data=None): + """Sends a POST request to the given service url with the json payload as content. 
+
+        Args:
+            url (str): The URL for which to execute a POST request
+            parameters (dict): parameters to attach to the url
+            data ([str]): Array of form url encoded data to be sent in the request payload
+
+        Returns:
+            requests.Response: request's response
+
+        Raises:
+            ServiceError: If anything goes wrong
+        """
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
+        response = requests.post(url, params=parameters, data=data,
+                                 headers=headers, auth=self.auth_header,
+                                 verify=self.sslverify, timeout=self.timeout)
+        if response.status_code != 200:
+            raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=url, r=response))
+        return response
+
     def delete(self, url, parameters=None):
        """Sends a DELETE request to the given service url.
@@ -715,6 +738,7 @@ def get_findings(self, uniform_path, timestamp, recursive=True, blacklisted="exc
 
        Raises:
            ServiceError: If anything goes wrong
+
        """
        service_url = self.get_project_service_url("findings") + uniform_path
        parameters = {
@@ -918,3 +942,41 @@ def get_architectures(self):
        if not response.ok:
            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
        return [architecture_overview['uniformPath'] for architecture_overview in response.json()]
+
+    def get_merge_requests(self):
+        """Returns all the merge requests known to Teamscale given the project config.
+
+        Returns:
+            List[MergeRequest] The list of merge requests.
+        """
+        service_url = self.get_new_project_service_url("merge-requests")
+        response = self.get(service_url)
+        if not response.ok:
+            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+        for merge_request in response.json()['mergeRequests']:
+            print(merge_request) #TODO: for debugging. Remove later
+
+        return [MergeRequest.from_json(merge_request) for merge_request in response.json()['mergeRequests']]
+
+    def get_mr_findings_churn(self, merge_request):
+        """Returns the findings churn count for a given merge request
+
+        Returns:
+            FindingsChurnCount: The findings churn count for the merge request.
+        """
+        service_url = self.get_new_project_service_url("merge-requests/"+merge_request.get_id_with_repo()
+                                                       .replace("/", "%2F")+"/delta")
+        response = self.get(service_url)
+        if response.status_code == 204:
+            # MR not analyzed
+            return
+        if not response.ok:
+            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+        findings_churn_from_json = response.json()['findingChurn']
+        finding_churn_count = FindingsChurnCount(len(findings_churn_from_json['addedFindings']),
+                                                 len(findings_churn_from_json['findingsAddedInBranch']),
+                                                 len(findings_churn_from_json['findingsInChangedCode']),
+                                                 len(findings_churn_from_json['removedFindings']),
+                                                 len(findings_churn_from_json['findingsRemovedInBranch']))
+
+        return finding_churn_count
diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py
new file mode 100644
index 0000000..8e5b465
--- /dev/null
+++ b/teamscale_client/merge_request.py
@@ -0,0 +1,144 @@
+from teamscale_client.utils import auto_str
+
+
+@auto_str
+class MergeRequest(object):
+    """Represents a Merge Request in Teamscale
+
+    Args:
+        id (int): The merge request id
+        id_with_repo (str): The merge request id appended to the repository name
+        status (str): The merge request status
+        title (str): The merge request title
+        source_branch (str): The merge request source branch
+        source_head (str): The commit sha on head (source branch)
+        target_branch (str): The branch to which the MR is intended to merge
+        url (str): The full SCM url of the branch
+        voting_record (VotingRecord): the voting data associated with the MR
+        findings_churn_count (FindingsChurnCount): the findings churn stats
+    """
+    # TODO not getting all the identifier fields from the response. Let's add the rest if needed
+    def __init__(self, id, id_with_repo, status, title, source_branch, source_head, target_branch, url, voting_record,
+                 findings_churn_count):
+        self.id = id
+        self.id_with_repo = id_with_repo
+        self.status = status
+        self.title = title
+        self.source_branch = source_branch
+        self.source_head = source_head
+        self.target_branch = target_branch
+        self.url = url
+        self.voting_record = voting_record
+        self.findings_churn_count = findings_churn_count
+
+    def print_mr(self):
+        print(f"{self.id_with_repo} - {self.status} : {self.url}")
+
+    def get_id_with_repo(self):
+        return self.id_with_repo
+
+    def get_source_branch(self):
+        return self.source_branch
+
+    @classmethod
+    def from_json(cls, json):
+        try:
+            voting = VotingRecord.from_json(json['votingRecord'])
+        except KeyError:
+            voting = None
+
+        try:
+            findings_churn = FindingsChurnCount.from_json(json['findingChurnCount'])
+        except KeyError:
+            findings_churn = None
+
+        return MergeRequest(json['mergeRequest']['identifier']['id'],
+                            json['mergeRequest']['identifier']['idWithRepository'],
+                            json['mergeRequest']['status'], json['mergeRequest']['title'],
+                            json['mergeRequest']['sourceBranch'], json['mergeRequest']['sourceHead'],
+                            json['mergeRequest']['targetBranch'], json['mergeRequest']['url'], voting, findings_churn)
+
+
+@auto_str
+class VotingRecord(object):
+    """Represents a Teamscale Voting Record for a given MR in Teamscale
+
+    Args:
+        timestamp (double): When Teamscale created the voting record
+        state (str): The voting state
+        commit (Commit): The commit the vote was cast for
+        comment (str): The comment attached to the vote
+        partitions_included_in_vote ([str]): The names of the partitions included in the vote
+    """
+
+    def __init__(self, timestamp, state, commit, comment, partitions_included_in_vote):
+
self.timestamp = timestamp + self.state = state + self.commit = commit + self.comment = comment + self.partitions_included_in_vote = partitions_included_in_vote + + @classmethod + def from_json(cls, json): + return VotingRecord(json['timestamp'], json['state'], Commit.from_json(json['commit']), json['comment'], + json['partitionsIncludedInVote']) + + +@auto_str +class Commit(object): + """Represents a Teamscale commit in the voting context + + Args: + type (str): Type of voting commit + branch_name (str): the branch name where voting happened + timestamp (double): when Teamscale created the voting commit + """ + # TODO: not getting here parentCommits field. Let's add it if necessary + def __init__(self, type, branch_name, timestamp): + self.type = type + self.branch_name = branch_name + self.timestamp = timestamp + + @classmethod + def from_json(cls, json): + return Commit(json['type'], json['branchName'], json['timestamp']) + + +@auto_str +class FindingsChurnCount(object): + """Represents a findings churn count for the MR + + Args: + added_findings (int): The number of added findings + findings_added_in_branch (int): The number of findings added in branch (only on merges) + findings_in_changed_code (int): The number of findings that are old and in changed code + removed_findings (int): The number of removed findings + findings_removed_in_branch (int): The number of findings removed in branch (only on merges) + """ + + def __init__(self, added_findings, findings_added_in_branch, findings_in_changed_code, removed_findings, + findings_removed_in_branch): + self.added_findings = added_findings + self.findings_added_in_branch = findings_added_in_branch + self.findings_in_changed_code = findings_in_changed_code + self.removed_findings = removed_findings + self.findings_removed_in_branch = findings_removed_in_branch + + def get_added_findings(self): + return self.added_findings + + def get_findings_added_in_branch(self): + return self.findings_added_in_branch + + def get_findings_in_changed_code(self): + return self.findings_in_changed_code + + def get_removed_findings(self): + return self.removed_findings + + def get_findings_removed_in_branch(self): + return self.findings_removed_in_branch + @classmethod + def from_json(cls, json): + return FindingsChurnCount(json['addedFindings'], json['findingsAddedInBranch'], json['findingsInChangedCode'], + json['removedFindings'], json['findingsRemovedInBranch']) From cea5a61ace41d04c49f547a19985383462f2052e Mon Sep 17 00:00:00 2001 From: Bruno da Silva Date: Thu, 10 Aug 2023 18:44:51 -0700 Subject: [PATCH 74/79] Added new supporting methods to get finding churn by commit timestamp --- teamscale_client/client.py | 61 +++++++++++++++++++++++++++++-- teamscale_client/merge_request.py | 10 +++++ 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 58819c1..18165b4 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -139,8 +139,8 @@ def post(self, url, json=None, parameters=None, data=None): raise ServiceError("ERROR: POST {url}: {r.status_code}:{r.text}".format(url=url, r=response)) return response - def post(self, url, parameters=None, data=None): - """Sends a POST request to the given service url with the json payload as content. + def post_url_encoded_data(self, url, parameters=None, data=None): + """Sends a POST request to the given service url with the url encoded data as payload. 
         Args:
             url (str): The URL for which to execute a POST request
             parameters (dict): parameters to attach to the url
             data ([str]): Array of form url encoded data to be sent in the request payload
@@ -961,6 +961,8 @@ def get_merge_requests(self):
     def get_mr_findings_churn(self, merge_request):
        """Returns the findings churn count for a given merge request
 
+        Args:
+            merge_request (MergeRequest): The merge request object
        Returns:
            FindingsChurnCount: The findings churn count for the merge request.
        """
@@ -968,7 +970,7 @@ def get_mr_findings_churn(self, merge_request):
                                                       .replace("/", "%2F")+"/delta")
        response = self.get(service_url)
        if response.status_code == 204:
-            # MR not analyzed
+            # MR not analyzed by TS
            return
        if not response.ok:
            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
@@ -978,5 +980,56 @@
                                                 len(findings_churn_from_json['findingsInChangedCode']),
                                                 len(findings_churn_from_json['removedFindings']),
                                                 len(findings_churn_from_json['findingsRemovedInBranch']))
-        return finding_churn_count
+        return finding_churn_count
+
+    def get_mr_commits_timestamps(self, merge_request):
+        """Fecthes the MR delta and returns a list of timestamps from the merge request commits
+
+        Args:
+            merge_request (MergeRequest): The merge request object
+        Returns:
+            List[double] The list of timestamps corresponding to the merge request commits
+        """
+        service_url = self.get_new_project_service_url("merge-requests/"+merge_request.get_id_with_repo()
+                                                       .replace("/", "%2F")+"/delta")
+        response = self.get(service_url)
+        if response.status_code == 204:
+            # MR not analyzed
+            return
+        if not response.ok:
+            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+        timestamps = []
+        for ancestorSource in response.json()['mergeBase']['ancestorsOfSource']:
+            timestamps.append(ancestorSource['timestamp'])
+        return timestamps
+
+    def get_commits_findings_churn(self, source_branch, commit_timestamps):
+        """Fecthes the findings churn count for a list of commits
+        Args:
+            source_branch (str): The source branch name usually taken from a merge request
+            commit_timestamps (List[double]): List of commit timestamps
+
+        Returns:
+            List[FindingsChurnCount] The list of findings churn count for every commit that matched the input
+        """
+        service_url = self.get_new_project_service_url("finding-churn/count")
+        encoded_branch_and_timestamps = ""
+        source_branch = source_branch.replace("/", "%2F")
+        for timestamp in commit_timestamps:
+            encoded_branch_and_timestamps += "commit=" + source_branch + "%3A" + str(timestamp) + "&"
+        if len(encoded_branch_and_timestamps) > 0:
+            # Drop the trailing "&" (str.rstrip returns a new string, it does not modify in place)
+            encoded_branch_and_timestamps = encoded_branch_and_timestamps.rstrip("&")
+        response = self.post_url_encoded_data(service_url, None, encoded_branch_and_timestamps)
+        if response.status_code == 204:
+            # MR not analyzed by TS
+            return
+        if not response.ok:
+            raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+        findings_churn_list = []
+        for finding_churn in response.json():
+            if finding_churn is None:
+                continue
+            findings_churn_list.append(FindingsChurnCount.from_json(finding_churn))
+        return findings_churn_list
+        # TODO: how to make the following one line work by skipping the finding churns that are none?
+ # return [FindingsChurnCount.from_json(finding_churn) for finding_churn in response.json()] diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py index 8e5b465..382a0ba 100644 --- a/teamscale_client/merge_request.py +++ b/teamscale_client/merge_request.py @@ -138,7 +138,17 @@ def get_removed_findings(self): def get_findings_removed_in_branch(self): return self.findings_removed_in_branch + + def print_churn(self): + print(" => Findings Churn Count:") + print(f" Added: {self.get_added_findings()}") + print(f" Added in branch: {self.get_findings_added_in_branch()}") + print(f" In changed code: {self.get_findings_in_changed_code()}") + print(f" Removed: {self.get_removed_findings()}") + print(f" Removed in branch: {self.get_findings_removed_in_branch()}") @classmethod def from_json(cls, json): + if json is None: + return return FindingsChurnCount(json['addedFindings'], json['findingsAddedInBranch'], json['findingsInChangedCode'], json['removedFindings'], json['findingsRemovedInBranch']) From c94609b6b938bdad14d4eaf27c6306fab922957a Mon Sep 17 00:00:00 2001 From: Bruno da Silva Date: Fri, 18 Aug 2023 17:37:10 -0700 Subject: [PATCH 75/79] Support code for ongoing work on temp findings script --- teamscale_client/client.py | 37 +++++++++++++++++++++++++------ teamscale_client/merge_request.py | 3 +++ 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 18165b4..4339a4a 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -943,18 +943,23 @@ def get_architectures(self): raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) return [architecture_overview['uniformPath'] for architecture_overview in response.json()] - def get_merge_requests(self): + def get_merge_requests(self, status='OPEN'): """Returns all the merge requests known to Teamscale given the project config. + Args: + status (str): The merge request status filter (OPEN, MERGED, OTHER) Returns: List[MergeRequest] The list of merge requests. """ service_url = self.get_new_project_service_url("merge-requests") - response = self.get(service_url) + parameters = { + "status": status + } + response = self.get(service_url, parameters) if not response.ok: raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) - for merge_request in response.json()['mergeRequests']: - print(merge_request) #TODO: for debugging. Remove later + # for merge_request in response.json()['mergeRequests']: + # print(merge_request) #TODO: for debugging. 
Remove later return [MergeRequest.from_json(merge_request) for merge_request in response.json()['mergeRequests']] @@ -983,7 +988,7 @@ def get_mr_findings_churn(self, merge_request): return finding_churn_count def get_mr_commits_timestamps(self, merge_request): - """Fecthes the MR delta and returns a list of timestamps from the merge request commits + """Fetches the MR delta and returns a list of timestamps from the merge request commits Args: merge_request (MergeRequest): The merge request object @@ -1003,8 +1008,26 @@ def get_mr_commits_timestamps(self, merge_request): timestamps.append(ancestorSource['timestamp']) return timestamps - def get_commits_findings_churn(self, source_branch, commit_timestamps): - """Fecthes the findings churn count for a list of commits + def get_commit_findings(self, branch, timestamp): + """Fetches the findings churn count for a list of commits + Args: + branch (str): The source branch + timestamp (double): The commit timestamp + + Returns: + json The raw json response + """ + service_url = self.get_new_project_service_url("finding-churn/list") + parameters = { + "t": branch + ":" + str(timestamp) + } + response = self.get(service_url, parameters) + if not response.ok: + raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) + return response.json() + + def get_commit_findings_churn(self, source_branch, commit_timestamps): + """Fetches the findings churn count for a list of commits Args: source_branch (str): The source branch name usually taken from a merge request commit_timestamps (List[double]): List of commit timestamps diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py index 382a0ba..7dd2393 100644 --- a/teamscale_client/merge_request.py +++ b/teamscale_client/merge_request.py @@ -34,6 +34,9 @@ def __init__(self, id, id_with_repo, status, title, source_branch, source_head, def print_mr(self): print(f"{self.id_with_repo} - {self.status} : {self.url}") + def print_teamscale_mr_url(self, teamscale_base_url, project): + print(f"{self.id_with_repo} - {self.status} : {teamscale_base_url}{project}/{self.id_with_repo}") + def get_id_with_repo(self): return self.id_with_repo From 82ae08e3fa1a4ea7cdb601665be0bc5373a9fc51 Mon Sep 17 00:00:00 2001 From: Bruno da Silva Date: Mon, 21 Aug 2023 17:45:57 -0700 Subject: [PATCH 76/79] Refactoring the solution and changing output --- teamscale_client/client.py | 7 +- teamscale_client/merge_request.py | 177 ++++++++++++++++++++++++++++++ 2 files changed, 181 insertions(+), 3 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index 4339a4a..cd7265a 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -10,7 +10,7 @@ from requests.auth import HTTPBasicAuth from teamscale_client.data import ServiceError, Baseline, ProjectInfo, Finding, Task -from teamscale_client.merge_request import MergeRequest, FindingsChurnCount +from teamscale_client.merge_request import MergeRequest, FindingsChurnCount, CommitFindings from teamscale_client.utils import to_json @@ -1015,7 +1015,7 @@ def get_commit_findings(self, branch, timestamp): timestamp (double): The commit timestamp Returns: - json The raw json response + CommitFindings The commit findings data """ service_url = self.get_new_project_service_url("finding-churn/list") parameters = { @@ -1024,7 +1024,8 @@ def get_commit_findings(self, branch, timestamp): response = self.get(service_url, parameters) if not response.ok: raise ServiceError("ERROR: GET {url}: 
{r.status_code}:{r.text}".format(url=service_url, r=response))
-        return response.json()
+        # return response.json() TODO: remove
+        return CommitFindings.from_json(response.json())
 
     def get_commit_findings_churn(self, source_branch, commit_timestamps):
        """Fetches the findings churn count for a list of commits
diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py
index 7dd2393..15e9ecc 100644
--- a/teamscale_client/merge_request.py
+++ b/teamscale_client/merge_request.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
 from teamscale_client.utils import auto_str
 
 
@@ -17,6 +19,7 @@ class MergeRequest(object):
        voting_record (VotingRecord): the voting data associated with the MR
        findings_churn_count (FindingsChurnCount): the findings churn stats
    """
+
    # TODO not getting all the identifier fields from the response. Let's add the rest if needed
    def __init__(self, id, id_with_repo, status, title, source_branch, source_head, target_branch, url, voting_record,
                 findings_churn_count):
@@ -96,6 +99,7 @@ class Commit(object):
        branch_name (str): the branch name where voting happened
        timestamp (double): when Teamscale created the voting commit
    """
+
    # TODO: not getting here parentCommits field. Let's add it if necessary
    def __init__(self, type, branch_name, timestamp):
        self.type = type
@@ -149,9 +153,182 @@ def print_churn(self):
        print(f" In changed code: {self.get_findings_in_changed_code()}")
        print(f" Removed: {self.get_removed_findings()}")
        print(f" Removed in branch: {self.get_findings_removed_in_branch()}")
+
    @classmethod
    def from_json(cls, json):
        if json is None:
            return
        return FindingsChurnCount(json['addedFindings'], json['findingsAddedInBranch'], json['findingsInChangedCode'],
                                  json['removedFindings'], json['findingsRemovedInBranch'])
+
+
+@auto_str
+class CommitFindings(object):
+    """Represents findings data related to a commit
+
+    Args:
+        removed_findings_count (int): Number of findings removed by the commit
+        added_findings_count (int): Number of findings added by the commit
+        removed_findings (List[Finding]): The findings removed by the commit
+        added_findings (List[Finding]): The findings added by the commit
+    """
+
+    def __init__(self, removed_findings_count, added_findings_count, removed_findings, added_findings):
+        self.removed_findings_count = removed_findings_count
+        self.added_findings_count = added_findings_count
+        self.removed_findings = removed_findings
+        self.added_findings = added_findings
+
+    def get_removed_findings_count(self):
+        return self.removed_findings_count
+
+    def get_removed_findings(self):
+        return self.removed_findings
+
+    @classmethod
+    def from_json(cls, json):
+        if json is None:
+            return
+        return CommitFindings(json['removedFindingsCount'], json['addedFindingsCount'],
+                              [Finding.from_json(removed_f) for removed_f in json['removedFindings']],
+                              [Finding.from_json(added_f) for added_f in json['addedFindings']])
+
+
+@auto_str
+class Finding(object):
+    """Represents a Teamscale finding
+
+    Args:
+        f_id (str): Finding unique id in Teamscale
+        group_name (str): Finding group in Teamscale
+        category_name (str): Finding category in Teamscale
+        message (str): Finding message in Teamscale
+        assessment (str): The assessment of the finding (e.g. RED or YELLOW) as reported by Teamscale.
+ location (FindingLocation): location data for the finding + birth(FindingBirth): + death(FindingDeath): + """ + + def __init__(self, f_id, group_name, category_name, message, assessment, location, birth, death): + self.f_id = f_id + self.group_name = group_name + self.category_name = category_name + self.message = message + self.assessment = assessment + self.location = location + self.birth = birth + self.death = death + + def get_birth(self): + return self.birth + + def get_id(self): + return self.f_id + + def print(self): + print(" Finding id: " + self.f_id) + print(" Finding uniformPath: " + self.location.get_uniform_path()) + print(" Finding message: " + self.message) + print(" Introduction date/time: " + str(datetime.fromtimestamp(self.birth.get_timestamp() / 1000))) + if self.death: + print(" Removal date/time: " + str(datetime.fromtimestamp(self.death.get_timestamp() / 1000))) + + @classmethod + def from_json(cls, json): + if json is None: + return + try: + death = FindingDeath.from_json(json['death']) + except KeyError: + death = None + return Finding(json['id'], json['groupName'], json['categoryName'], json['message'], json['assessment'], + FindingLocation.from_json(json['location']), FindingBirth.from_json(json['birth']), death) + + +@auto_str +class FindingLocation(object): + """Represents the location of a Teamscale finding + + Args: + f_type (str): Finding type in Teamscale + uniform_path (str): Uniform path of the finding group in Teamscale + raw_start_offset (int): Start offset of the finding + raw_end_offset (int): End offset of the finding + raw_start_line (int): Line where the finding starts + raw_end_line(int): Line where the finding ends + location(str): file path where the finding is located + """ + + def __init__(self, f_type, uniform_path, raw_start_offset, raw_end_offset, raw_start_line, raw_end_line, location): + self.f_type = f_type + self.uniform_path = uniform_path + self.raw_start_offset = raw_start_offset + self.raw_end_offset = raw_end_offset + self.raw_start_line = raw_start_line + self.raw_end_line = raw_end_line + self.location = location + + def get_uniform_path(self): + return self.uniform_path + + @classmethod + def from_json(cls, json): + if json is None: + return + return FindingLocation(json['type'], json['uniformPath'], json['rawStartOffset'], json['rawEndOffset'], + json['rawStartLine'], json['rawEndLine'], json['location']) + + +@auto_str +class FindingBirth(object): + """Represents data about the finding introduction + + Args: + f_type (str): Type of finding introduction + branch_name (str): Branch where the finding was introduced + timestamp (double): When the finding was introduced + """ + + def __init__(self, f_type, branch_name, timestamp): + self.f_type = f_type + self.branch_name = branch_name + self.timestamp = timestamp + + def get_timestamp(self): + return self.timestamp + + def get_branch_name(self): + return self.branch_name + + @classmethod + def from_json(cls, json): + if json is None: + return + return FindingBirth(json['type'], json['branchName'], json['timestamp']) + + +@auto_str +class FindingDeath(object): + """Represents data about the finding removal + + Args: + f_type (str): Type of finding removal + branch_name (str): Branch where the finding was removed + timestamp (double): When the finding was removed + """ + + def __init__(self, f_type, branch_name, timestamp): + self.f_type = f_type + self.branch_name = branch_name + self.timestamp = timestamp + + def get_timestamp(self): + return self.timestamp + + @classmethod + 
def from_json(cls, json): + if json is None: + return + return FindingDeath(json['type'], json['branchName'], json['timestamp']) From b229c08cdbbbcb3b6eece751817ae411aae62718 Mon Sep 17 00:00:00 2001 From: Bruno da Silva Date: Tue, 22 Aug 2023 17:53:54 -0700 Subject: [PATCH 77/79] Fixed a bug --- teamscale_client/client.py | 7 +------ teamscale_client/merge_request.py | 31 +++++++++++++++++++++++++++---- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/teamscale_client/client.py b/teamscale_client/client.py index cd7265a..67d4fe7 100644 --- a/teamscale_client/client.py +++ b/teamscale_client/client.py @@ -958,8 +958,6 @@ def get_merge_requests(self, status='OPEN'): response = self.get(service_url, parameters) if not response.ok: raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) - # for merge_request in response.json()['mergeRequests']: - # print(merge_request) #TODO: for debugging. Remove later return [MergeRequest.from_json(merge_request) for merge_request in response.json()['mergeRequests']] @@ -1024,7 +1022,6 @@ def get_commit_findings(self, branch, timestamp): response = self.get(service_url, parameters) if not response.ok: raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response)) - # return response.json() TODO: remove return CommitFindings.from_json(response.json()) def get_commit_findings_churn(self, source_branch, commit_timestamps): @@ -1054,6 +1051,4 @@ def get_commit_findings_churn(self, source_branch, commit_timestamps): if finding_churn is None: continue findings_churn_list.append(FindingsChurnCount.from_json(finding_churn)) - return findings_churn_list - # TODO: how to make the following one line work by skipping the finding churns that are none? - # return [FindingsChurnCount.from_json(finding_churn) for finding_churn in response.json()] + return findings_churn_list \ No newline at end of file diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py index 15e9ecc..cff3058 100644 --- a/teamscale_client/merge_request.py +++ b/teamscale_client/merge_request.py @@ -20,7 +20,6 @@ class MergeRequest(object): findings_churn_count (FindingsChurnCount): the findings churn stats """ - # TODO not getting all the identifier fields from the reponse. Let's add the rest if needed def __init__(self, id, id_with_repo, status, title, source_branch, source_head, target_branch, url, voting_record, findings_churn_count): self.id = id @@ -40,6 +39,9 @@ def print_mr(self): def print_teamscale_mr_url(self, teamscale_base_url, project): print(f"{self.id_with_repo} - {self.status} : {teamscale_base_url}{project}/{self.id_with_repo}") + def get_id(self): + return self.id + def get_id_with_repo(self): return self.id_with_repo @@ -100,7 +102,6 @@ class Commit(object): timestamp (double): when Teamscale created the voting commit """ - # TODO: not getting here parentCommits field. 
Let's add it if necessary
    def __init__(self, type, branch_name, timestamp):
        self.type = type
        self.branch_name = branch_name
@@ -224,6 +225,9 @@ def __init__(self, f_id, group_name, category_name, message, assessment, locatio
    def get_birth(self):
        return self.birth
 
+    def get_death(self):
+        return self.death
+
    def get_id(self):
        return self.f_id
 
@@ -277,8 +281,24 @@ def get_uniform_path(self):
    def from_json(cls, json):
        if json is None:
            return
-        return FindingLocation(json['type'], json['uniformPath'], json['rawStartOffset'], json['rawEndOffset'],
-                               json['rawStartLine'], json['rawEndLine'], json['location'])
+        try:
+            raw_start_offset = json['rawStartOffset']
+        except KeyError:
+            raw_start_offset = None
+        try:
+            raw_end_offset = json['rawEndOffset']
+        except KeyError:
+            raw_end_offset = None
+        try:
+            raw_start_line = json['rawStartLine']
+        except KeyError:
+            raw_start_line = None
+        try:
+            raw_end_line = json['rawEndLine']
+        except KeyError:
+            raw_end_line = None
+        return FindingLocation(json['type'], json['uniformPath'], raw_start_offset, raw_end_offset,
+                               raw_start_line, raw_end_line, json['location'])
 
 
 @auto_str
@@ -327,6 +347,9 @@ def __init__(self, f_type, branch_name, timestamp):
    def get_timestamp(self):
        return self.timestamp
 
+    def get_branch_name(self):
+        return self.branch_name
+
    @classmethod
    def from_json(cls, json):
        if json is None:
            return

From 9cef9087bba94f19b05478e93fe59c685bc09155 Mon Sep 17 00:00:00 2001
From: Kesina Baral
Date: Thu, 31 Aug 2023 15:14:40 -0700
Subject: [PATCH 78/79] test gap analysis work

---
 teamscale_client/client.py        | 45 +++++++++++++++++++++++++++++++
 teamscale_client/merge_request.py |  3 +++
 2 files changed, 48 insertions(+)

diff --git a/teamscale_client/client.py b/teamscale_client/client.py
index 67d4fe7..ca31811 100644
--- a/teamscale_client/client.py
+++ b/teamscale_client/client.py
@@ -961,6 +961,51 @@ def get_merge_requests(self, status='OPEN'):
 
        return [MergeRequest.from_json(merge_request) for merge_request in response.json()['mergeRequests']]
 
+    def get_testgap_result(self, merge_request, timestamp):
+        """Returns the test gap treemap for the given project, merge request and timestamp."""
+        service_url = self.get_global_service_url("api/projects") + f"{self.project}" + "/test-gaps/treemap"
+        parameters = {
+            "baseline": merge_request.get_source_branch() + ":" + timestamp,
+            "end": merge_request.get_target_branch() + ":" + timestamp,
+            "merge-request-mode": "true",
+            "merge-request-identifier": merge_request.get_id_with_repo(),
+            "auto-select-branch": "false",
+            "include-child-issues": "false",
+            "only-executed-methods": "false",
+            "exclude-unchanged-methods": "true",
+            "churn": "false",
+            "execution-only": "false",
+            "all-partitions": "true"
+        }
+        try:
+            response = self.get(service_url, parameters=parameters)
+            if not response.ok:
+                raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+            return response.json()
+        except ServiceError:
+            print("No treemap because no actual commit was found for merge request", merge_request.get_id())
+            return
+
+    def get_mr_commits(self, merge_request):
+        """Returns all commits in the merge request."""
+        service_url = self.get_new_project_service_url("merge-requests/repository-churn")
+
+        parameters = {
+            "source": merge_request.get_source_branch() + ":HEAD",
+            "target": merge_request.get_target_branch() + ":HEAD"
+        }
+        try:
+            response = self.get(service_url, parameters=parameters)
+            if not response.ok:
+                raise ServiceError("ERROR: GET {url}: {r.status_code}:{r.text}".format(url=service_url, r=response))
+            return response.json().get('logEntries')
+        except ServiceError:
+            print("Did not find actual commit for merge request", merge_request.get_id())
+            return
+
     def get_mr_findings_churn(self, merge_request):
        """Returns the findings churn count for a given merge request
diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py
index cff3058..a2a30a7 100644
--- a/teamscale_client/merge_request.py
+++ b/teamscale_client/merge_request.py
@@ -48,6 +48,9 @@ def get_id_with_repo(self):
     def get_source_branch(self):
        return self.source_branch
 
+    def get_target_branch(self):
+        return self.target_branch
+
    @classmethod
    def from_json(cls, json):
        try:

From cb5cbf3ba47e8798daf22dca1d5b4bd34d3f3cc0 Mon Sep 17 00:00:00 2001
From: Bruno da Silva
Date: Thu, 14 Sep 2023 15:29:42 -0700
Subject: [PATCH 79/79] Added helper method in merge_request data class

---
 teamscale_client/merge_request.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/teamscale_client/merge_request.py b/teamscale_client/merge_request.py
index a2a30a7..f215c05 100644
--- a/teamscale_client/merge_request.py
+++ b/teamscale_client/merge_request.py
@@ -39,6 +39,9 @@ def print_mr(self):
     def print_teamscale_mr_url(self, teamscale_base_url, project):
        print(f"{self.id_with_repo} - {self.status} : {teamscale_base_url}{project}/{self.id_with_repo}")
 
+    def get_teamscale_mr_url(self, teamscale_base_url, project):
+        return f"{teamscale_base_url}{project}/{self.id_with_repo}"
+
    def get_id(self):
        return self.id
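
Taken together, patches 73-79 leave the client with a small merge-request analysis workflow: list the merge requests of a project, fetch the findings churn for each one, and optionally drill down to per-commit churn via the MR delta. The sketch below shows how these pieces fit together; it is not part of the patch series itself. The server URL, user name, access token and project id are placeholders, and it assumes a reachable Teamscale instance, since the `TeamscaleClient` constructor calls `check_api_version()` against the server.

```python
# Minimal usage sketch for the merge-request APIs added in patches 73-79.
# Hypothetical values: URL, user, token and project id are examples only.
from teamscale_client.client import TeamscaleClient

client = TeamscaleClient("https://teamscale.example.com", "jdoe", "access-token", "my-project")

# List open merge requests (patch 75 added the status filter).
for mr in client.get_merge_requests(status="OPEN"):
    mr.print_mr()

    # Findings churn for the whole merge request; the method returns None
    # when Teamscale has not analyzed the MR yet (HTTP 204).
    churn = client.get_mr_findings_churn(mr)
    if churn is None:
        continue
    churn.print_churn()

    # Per-commit churn: take the commit timestamps from the MR delta and
    # query the finding-churn counts for all of them in a single request.
    timestamps = client.get_mr_commits_timestamps(mr)
    if timestamps:
        per_commit = client.get_commit_findings_churn(mr.get_source_branch(), timestamps)
        for commit_churn in per_commit or []:
            commit_churn.print_churn()
```

One convention worth noting in these methods: endpoints that depend on analysis state signal "not analyzed yet" with HTTP 204 rather than an error, and the client methods translate that into a `None` return value, so callers should check for `None` before using the result.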