diff --git a/web/requirements.txt b/web/requirements.txt index a801999c2c..9adeba6af9 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -7,8 +7,8 @@ Authlib<2 requests<3 # Required by Authlib. Not installed automatically for some reason. lxml<6 -sqlalchemy<2 -alembic<2 +sqlalchemy~=2.0 +alembic~=1.5 portalocker<4 psutil<8 multiprocess<0.71 diff --git a/web/requirements_py/db_pg8000/requirements.txt b/web/requirements_py/db_pg8000/requirements.txt index d1c62f0a6c..dbb0613aac 100644 --- a/web/requirements_py/db_pg8000/requirements.txt +++ b/web/requirements_py/db_pg8000/requirements.txt @@ -1,7 +1,7 @@ lxml<6 -sqlalchemy<2 -alembic<2 -pg8000<=1.31.4 +sqlalchemy~=2.0 +alembic~=1.5 +pg8000~=1.31 psutil<8 portalocker<4 diff --git a/web/requirements_py/db_psycopg2/requirements.txt b/web/requirements_py/db_psycopg2/requirements.txt index 7fb207af38..9f36b602c1 100644 --- a/web/requirements_py/db_psycopg2/requirements.txt +++ b/web/requirements_py/db_psycopg2/requirements.txt @@ -1,7 +1,7 @@ lxml<6 -sqlalchemy<2 -alembic<2 -psycopg2-binary<=2.9.10 +sqlalchemy~=2.0 +alembic~=1.5 +psycopg2-binary~=2.9 psutil<8 portalocker<4 diff --git a/web/requirements_py/dev/requirements.txt b/web/requirements_py/dev/requirements.txt index 562311b72e..4a6c8214a9 100644 --- a/web/requirements_py/dev/requirements.txt +++ b/web/requirements_py/dev/requirements.txt @@ -1,6 +1,6 @@ pycodestyle<=2.12.0 -psycopg2-binary<=2.9.10 -pg8000<=1.31.4 +psycopg2-binary~=2.9 +pg8000~=1.31 pylint<3.3 pytest<=7.3.1 mkdocs<=1.5.3 diff --git a/web/server/codechecker_server/api/mass_store_run.py b/web/server/codechecker_server/api/mass_store_run.py index 06f1ff2ae6..6534ba0738 100644 --- a/web/server/codechecker_server/api/mass_store_run.py +++ b/web/server/codechecker_server/api/mass_store_run.py @@ -412,8 +412,7 @@ def __init__(self, self.user_name = user_name with DBSession(self._config_db) as session: - product: Optional[Product] = session.query(Product) \ - .get(product_id) + product: Optional[Product] = session.get(Product, product_id) if not product: raise KeyError(f"No product with ID '{product_id}'") @@ -576,8 +575,8 @@ def _implementation(self, tm: TaskManager): raise with DBSession(tm.configuration_database_session_factory) as session: - db_product: Optional[Product] = session.query(Product) \ - .get(self._product_id) + db_product: Optional[Product] = \ + session.get(Product, self._product_id) if not db_product: raise KeyError(f"No product with ID '{self._product_id}'") @@ -668,7 +667,7 @@ def __init__(self, str, Tuple[Report, Union[DBReport, int]]] = {} with DBSession(config_db) as session: - product = session.query(Product).get(self.__product.id) + product = session.get(Product, self.__product.id) self.__report_limit = product.report_limit def __store_source_files( @@ -793,7 +792,7 @@ def __add_file_content( hasher.update(source_file_content) content_hash = hasher.hexdigest() - file_content = session.query(FileContent).get(content_hash) + file_content = session.get(FileContent, content_hash) if not file_content: if not source_file_content: source_file_content = get_file_content(source_file_name) @@ -1587,7 +1586,7 @@ def finish_checker_run( """ Finish the storage of the given run. 
""" try: LOG.debug("Finishing checker run") - run = session.query(Run).get(run_id) + run = session.get(Run, run_id) if not run: return False diff --git a/web/server/codechecker_server/api/product_server.py b/web/server/codechecker_server/api/product_server.py index 4cd2992caa..51f69ca800 100644 --- a/web/server/codechecker_server/api/product_server.py +++ b/web/server/codechecker_server/api/product_server.py @@ -13,6 +13,7 @@ import os import random +from sqlalchemy import text from sqlalchemy.sql.expression import and_ from sqlalchemy import create_engine, exc @@ -244,7 +245,7 @@ def getCurrentProduct(self): msg) with DBSession(self.__session) as session: - prod = session.query(Product).get(self.__product.id) + prod = session.get(Product, self.__product.id) if not prod: msg = "The product requested has been disconnected from the " \ @@ -270,7 +271,7 @@ def getProductConfiguration(self, product_id): ], {'productID': product_id}) with DBSession(self.__session) as session: - product = session.query(Product).get(product_id) + product = session.get(Product, product_id) if product is None: msg = f"Product with ID {product_id} does not exist!" LOG.error(msg) @@ -354,7 +355,7 @@ def __create_product_database(self, product): db_pass = convert.from_b64(product_info.password_b64) db_name = product_info.database - engine_url = URL( + engine_url = URL.create( drivername=db_engine, username=db_user, password=db_pass, @@ -365,9 +366,9 @@ def __create_product_database(self, product): engine = create_engine(engine_url) try: with engine.connect() as conn: - conn.execute("commit") + conn.execute(text("commit")) LOG.info("Creating database '%s'", db_name) - conn.execute(f"CREATE DATABASE {db_name}") + conn.execute(text(f"CREATE DATABASE {db_name}")) conn.close() except exc.ProgrammingError as e: LOG.error("ProgrammingError occurred: %s", str(e)) @@ -555,7 +556,7 @@ def editProduct(self, product_id, new_config): new_configuration. """ with DBSession(self.__session) as session: - product = session.query(Product).get(product_id) + product = session.get(Product, product_id) if product is None: msg = f"Product with ID {product_id} does not exist!" LOG.error(msg) @@ -735,7 +736,7 @@ def editProduct(self, product_id, new_config): LOG.info("Product configuration edited and saved successfully.") if product_needs_reconnect: - product = session.query(Product).get(product_id) + product = session.get(Product, product_id) LOG.info("Product change requires database reconnection...") LOG.debug("Disconnecting...") @@ -762,7 +763,7 @@ def removeProduct(self, product_id): self.__require_permission([permissions.SUPERUSER]) with DBSession(self.__session) as session: - product = session.query(Product).get(product_id) + product = session.get(Product, product_id) if product is None: msg = f"Product with ID {product_id} does not exist!" 
LOG.error(msg) diff --git a/web/server/codechecker_server/api/report_server.py b/web/server/codechecker_server/api/report_server.py index 02f466f752..4cb5edc2a3 100644 --- a/web/server/codechecker_server/api/report_server.py +++ b/web/server/codechecker_server/api/report_server.py @@ -26,7 +26,7 @@ import sqlalchemy from sqlalchemy.sql.expression import or_, and_, not_, func, \ - asc, desc, union_all, select, bindparam, literal_column, case, cast + asc, desc, union_all, select, bindparam, literal_column, case, cast, true from sqlalchemy.orm import contains_eager from sqlalchemy.types import ARRAY, String @@ -216,25 +216,29 @@ def process_report_filter( OR = [File.filepath.ilike(conv(fp)) for fp in report_filter.filepath] - AND.append(or_(*OR)) - join_tables.append(File) + if OR: + AND.append(or_(*OR)) + join_tables.append(File) if report_filter.checkerMsg: OR = [Report.checker_message.ilike(conv(cm)) for cm in report_filter.checkerMsg] - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.analyzerNames or report_filter.checkerName \ or report_filter.severity: if report_filter.analyzerNames: OR = [Checker.analyzer_name.ilike(conv(an)) for an in report_filter.analyzerNames] - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.checkerName: OR = [Checker.checker_name.ilike(conv(cn)) for cn in report_filter.checkerName] - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.severity: AND.append(Checker.severity.in_(report_filter.severity)) @@ -244,8 +248,9 @@ def process_report_filter( if report_filter.runName: OR = [Run.name.ilike(conv(rn)) for rn in report_filter.runName] - AND.append(or_(*OR)) - join_tables.append(Run) + if OR: + AND.append(or_(*OR)) + join_tables.append(Run) if report_filter.reportHash: OR = [] @@ -260,15 +265,16 @@ def process_report_filter( if no_joker: OR.append(Report.bug_id.in_(no_joker)) - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.cleanupPlanNames: OR = [] for cleanup_plan_name in report_filter.cleanupPlanNames: - q = select([CleanupPlanReportHash.bug_hash]) \ + q = select(CleanupPlanReportHash.bug_hash) \ .where( CleanupPlanReportHash.cleanup_plan_id.in_( - select([CleanupPlan.id]) + select(CleanupPlan.id) .where(CleanupPlan.name == cleanup_plan_name) .distinct() )) \ @@ -276,7 +282,8 @@ def process_report_filter( OR.append(Report.bug_id.in_(q)) - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.reportStatus: dst = list(map(detection_status_str, @@ -298,7 +305,8 @@ def process_report_filter( if ReportStatus.CLOSED in report_filter.reportStatus: OR.append(not_(filter_query)) - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.detectionStatus: dst = list(map(detection_status_str, @@ -308,7 +316,8 @@ def process_report_filter( if report_filter.reviewStatus: OR = [Report.review_status.in_( list(map(review_status_str, report_filter.reviewStatus)))] - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.firstDetectionDate is not None: date = datetime.fromtimestamp(report_filter.firstDetectionDate) @@ -346,7 +355,8 @@ def process_report_filter( '%Y-%m-%d %H:%M:%S.%f') OR.append(and_(Report.detected_at <= date, or_( Report.fixed_at.is_(None), Report.fixed_at >= date))) - AND.append(or_(*OR)) + if OR: + AND.append(or_(*OR)) if report_filter.componentNames: if report_filter.componentMatchesAnyPoint: @@ -392,9 +402,10 @@ def process_report_filter( for v in values])) if values else and_( ReportAnnotations.key == key)) - AND.append(or_(*OR)) 
+ if OR: + AND.append(or_(*OR)) - filter_expr = and_(*AND) + filter_expr = and_(*AND) if AND else true() return filter_expr, join_tables @@ -416,7 +427,7 @@ def process_source_component_filter(session, component_names): if file_query is not None: OR.append(file_query) - return or_(*OR) + return or_(*OR) if OR else true() def filter_open_reports_in_tags(results, run_ids, tag_ids): @@ -477,12 +488,12 @@ def get_include_skip_queries( To get the include and skip lists use the 'get_component_values' function. """ - include_q = select([File.id]) \ + include_q = select(File.id) \ .where(or_(*[ File.filepath.like(conv(fp)) for fp in include])) \ .distinct() - skip_q = select([File.id]) \ + skip_q = select(File.id) \ .where(or_(*[ File.filepath.like(conv(fp)) for fp in skip])) \ .distinct() @@ -1234,7 +1245,7 @@ def get_cleanup_plan(session, cleanup_plan_id: int) -> CleanupPlan: Check if the given cleanup id exists in the database and returns the cleanup. Otherwise it will raise an exception. """ - cleanup_plan = session.query(CleanupPlan).get(cleanup_plan_id) + cleanup_plan = session.get(CleanupPlan, cleanup_plan_id) if not cleanup_plan: raise codechecker_api_shared.ttypes.RequestFailed( @@ -1298,7 +1309,7 @@ def process_rs_rule_filter( ): """ Process review status rule filter. """ if rule_filter: - if rule_filter.reportHashes is not None: + if rule_filter.reportHashes: OR = [ReviewStatus.bug_hash.ilike(conv(report_hash)) for report_hash in rule_filter.reportHashes] query = query.filter(or_(*OR)) @@ -1308,7 +1319,7 @@ def process_rs_rule_filter( ReviewStatus.status.in_( map(review_status_str, rule_filter.reviewStatuses))) - if rule_filter.authors is not None: + if rule_filter.authors: OR = [ReviewStatus.author.ilike(conv(author)) for author in rule_filter.authors] query = query.filter(or_(*OR)) @@ -1367,7 +1378,7 @@ def get_is_enabled_case(subquery): )) return case( - [(detection_status_filters, False)], + (detection_status_filters, False), else_=True ) @@ -1393,7 +1404,7 @@ def get_is_opened_case(subquery): review_status_str, review_statuses))) ] return case( - [(and_(*detection_and_review_status_filters), True)], + (and_(*detection_and_review_status_filters), True), else_=False ) @@ -1850,9 +1861,9 @@ def getDiffResultsHash(self, run_ids, report_hashes, diff_type, base_hashes, run_ids, tag_ids) if self._product.driver_name == 'postgresql': - new_hashes = select([ + new_hashes = select( func.unnest(cast(report_hashes, ARRAY(String))) - .label('bug_id')]) \ + .label('bug_id')) \ .except_(base_hashes).alias('new_bugs') return [res[0] for res in session.query(new_hashes)] else: @@ -1865,10 +1876,10 @@ def getDiffResultsHash(self, run_ids, report_hashes, diff_type, for chunk in util.chunks( iter(report_hashes), SQLITE_MAX_COMPOUND_SELECT): new_hashes_query = union_all(*[ - select([bindparam('bug_id' + str(i), h) - .label('bug_id')]) + select(bindparam('bug_id' + str(i), h) + .label('bug_id')) for i, h in enumerate(chunk)]) - q = select([new_hashes_query.subquery()]) \ + q = select(new_hashes_query.subquery()) \ .except_(base_hashes) new_hashes.extend([ res[0] for res in session.query(q.subquery())]) @@ -1983,10 +1994,10 @@ def getRunResults(self, run_ids, limit, offset, sort_types, annotation_cols = OrderedDict() for col in annotation_keys: - annotation_cols[col] = func.max(sqlalchemy.case([( + annotation_cols[col] = func.max(sqlalchemy.case(( ReportAnnotations.key == col, cast(ReportAnnotations.value, - report_annotation_types[col]["db"]))])) \ + report_annotation_types[col]["db"])))) \ 
.label(f"annotation_{col}") if report_filter.isUnique: @@ -2147,7 +2158,7 @@ def getRunResults(self, run_ids, limit, offset, sort_types, # prevents it. q = q.order_by(Report.id) - if report_filter.annotations is not None: + if report_filter.annotations: annotations = defaultdict(list) for annotation in report_filter.annotations: annotations[annotation.first].append(annotation.second) @@ -2348,7 +2359,7 @@ def _setReviewStatus(self, session, report_hash, status, session object has to be used. """ - review_status = session.query(ReviewStatus).get(report_hash) + review_status = session.get(ReviewStatus, report_hash) if review_status is None: review_status = ReviewStatus() review_status.bug_hash = report_hash @@ -2452,7 +2463,7 @@ def isReviewStatusChangeDisabled(self): self.__require_view() with DBSession(self._config_database) as session: - product = session.query(Product).get(self._product.id) + product = session.get(Product, self._product.id) return product.is_review_status_change_disabled @exc_to_thrift_reqfail @@ -2465,7 +2476,7 @@ def changeReviewStatus(self, report_id, status, message): permissions.PRODUCT_STORE]) with DBSession(self._Session) as session: - report = session.query(Report).get(report_id) + report = session.get(Report, report_id) if report: # False positive and intentional reports are considered closed, # so their "fix date" is set. The reports are reopened when @@ -2619,7 +2630,7 @@ def getComments(self, report_id): self.__require_view() with DBSession(self._Session) as session: - report = session.query(Report).get(report_id) + report = session.get(Report, report_id) if report: result = [] @@ -2651,7 +2662,7 @@ def getCommentCount(self, report_id): """ self.__require_view() with DBSession(self._Session) as session: - report = session.query(Report).get(report_id) + report = session.get(Report, report_id) commentCount = 0 if report: commentCount = session.query(Comment) \ @@ -2672,7 +2683,7 @@ def addComment(self, report_id, comment_data): 'The comment message can not be empty!') with DBSession(self._Session) as session: - report = session.query(Report).get(report_id) + report = session.get(Report, report_id) if report: comment = self.__add_comment(report.bug_id, comment_data.message) @@ -2705,7 +2716,7 @@ def updateComment(self, comment_id, content): user = self._get_username() - comment = session.query(Comment).get(comment_id) + comment = session.get(Comment, comment_id) if comment: if comment.author not in ('Anonymous', user): raise codechecker_api_shared.ttypes.RequestFailed( @@ -2750,7 +2761,7 @@ def removeComment(self, comment_id): with DBSession(self._Session) as session: - comment = session.query(Comment).get(comment_id) + comment = session.get(Comment, comment_id) if comment: if comment.author not in ('Anonymous', user): raise codechecker_api_shared.ttypes.RequestFailed( @@ -2843,7 +2854,7 @@ def getSourceFileData(self, fileId, fileContent, encoding): """ self.__require_view() with DBSession(self._Session) as session: - sourcefile = session.query(File).get(fileId) + sourcefile = session.get(File, fileId) if sourcefile is None: return SourceFileData() @@ -2880,7 +2891,7 @@ def getBlameInfo(self, fileId): self.__require_view() with DBSession(self._Session) as session: - sourcefile = session.query(File).get(fileId) + sourcefile = session.get(File, fileId) if sourcefile is None: return BlameInfo() @@ -3465,8 +3476,6 @@ def getFileCounts(self, run_ids, report_filter, cmp_data, limit, offset): distinct_file_path = distinct_file_path.limit(limit) \ .offset(offset) - 
distinct_file_path = distinct_file_path.subquery() - count_col = Report.bug_id.distinct() if \ report_filter.isUnique else Report.bug_id @@ -3837,7 +3846,7 @@ def updateRunData(self, run_id, new_run_name): raise codechecker_api_shared.ttypes.RequestFailed( codechecker_api_shared.ttypes.ErrorCode.DATABASE, msg) - run_data = session.query(Run).get(run_id) + run_data = session.get(Run, run_id) if run_data: old_run_name = run_data.name run_data.name = new_run_name @@ -3874,7 +3883,7 @@ def addSourceComponent(self, name, value, description): """ self.__require_admin() with DBSession(self._Session) as session: - component = session.query(SourceComponent).get(name) + component = session.get(SourceComponent, name) user = self._auth_session.user if self._auth_session else None if component: @@ -3937,7 +3946,7 @@ def removeSourceComponent(self, name): self.__require_admin() with DBSession(self._Session) as session: - component = session.query(SourceComponent).get(name) + component = session.get(SourceComponent, name) if component: session.delete(component) session.commit() @@ -3960,7 +3969,7 @@ def getMissingContentHashes(self, file_hashes): with DBSession(self._Session) as session: q = session.query(FileContent) \ - .options(sqlalchemy.orm.load_only('content_hash')) \ + .options(sqlalchemy.orm.load_only(FileContent.content_hash)) \ .filter(FileContent.content_hash.in_(file_hashes)) return list(set(file_hashes) - @@ -3977,7 +3986,7 @@ def getMissingContentHashesForBlameInfo(self, file_hashes): with DBSession(self._Session) as session: q = session.query(FileContent) \ - .options(sqlalchemy.orm.load_only('content_hash')) \ + .options(sqlalchemy.orm.load_only(FileContent.content_hash)) \ .filter(FileContent.content_hash.in_(file_hashes)) \ .filter(FileContent.blame_info.isnot(None)) diff --git a/web/server/codechecker_server/api/tasks.py b/web/server/codechecker_server/api/tasks.py index eca624ca63..dc2a1fb09d 100644 --- a/web/server/codechecker_server/api/tasks.py +++ b/web/server/codechecker_server/api/tasks.py @@ -132,7 +132,7 @@ def getTaskInfo(self, token: str) -> TaskInfo: Returns the `TaskInfo` for the task identified by `token`. 
""" with DBSession(self._config_db) as session: - db_task: Optional[DBTask] = session.query(DBTask).get(token) + db_task: Optional[DBTask] = session.get(DBTask, token) if not db_task: raise RequestFailed(ErrorCode.GENERAL, f"Task '{token}' does not exist!") @@ -145,7 +145,7 @@ def getTaskInfo(self, token: str) -> TaskInfo: should_set_consumed_flag = db_task.is_in_terminated_state elif db_task.product_id is not None: associated_product: Optional[Product] = \ - session.query(Product).get(db_task.product_id) + session.get(Product, db_task.product_id) if not associated_product: LOG.error("No product with ID '%d', but a task is " "associated with it.", @@ -201,8 +201,8 @@ def getTasks(self, filters: TaskFilter) -> List[AdministratorTaskInfo]: {"config_db_session": session, "productID": prod_id}, self._auth_session)] if no_admin_products: - no_admin_products = [session.query(Product) - .get(product_id).endpoint + no_admin_products = [session.get(Product, product_id) + .endpoint for product_id in no_admin_products] # pylint: disable=consider-using-f-string raise RequestFailed(ErrorCode.UNAUTHORIZED, @@ -341,7 +341,7 @@ def cancelTask(self, token: str) -> bool: ErrorCode.UNAUTHORIZED, "cancelTask() requires server-level SUPERUSER rights.") - db_task: Optional[DBTask] = session.query(DBTask).get(token) + db_task: Optional[DBTask] = session.get(DBTask, token) if not db_task: raise RequestFailed(ErrorCode.GENERAL, f"Task '{token}' does not exist!") diff --git a/web/server/codechecker_server/database/config_db_model.py b/web/server/codechecker_server/database/config_db_model.py index b98b9d81d7..3e85e6384f 100644 --- a/web/server/codechecker_server/database/config_db_model.py +++ b/web/server/codechecker_server/database/config_db_model.py @@ -14,7 +14,7 @@ from sqlalchemy import Boolean, CHAR, Column, DateTime, Enum, ForeignKey, \ Integer, MetaData, String, Text, UniqueConstraint -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from sqlalchemy.sql.expression import false from ..permissions import get_permissions diff --git a/web/server/codechecker_server/database/database.py b/web/server/codechecker_server/database/database.py index b3f207aac0..0647091352 100644 --- a/web/server/codechecker_server/database/database.py +++ b/web/server/codechecker_server/database/database.py @@ -396,15 +396,15 @@ def create_engine(self): # FIXME: workaround for locking errors # FIXME: why is the connection used by multiple threads # is that a problem ??? do we need some extra locking??? 
- engine = sqlalchemy.create_engine(self.get_connection_string(), - encoding='utf8', - connect_args={'timeout': 600, - 'check_same_thread': False}, - poolclass=NullPool) + engine = sqlalchemy.create_engine( + self.get_connection_string(), + connect_args={'timeout': 600, 'check_same_thread': False}, + poolclass=NullPool) else: - engine = sqlalchemy.create_engine(self.get_connection_string(), - encoding='utf8', - poolclass=NullPool) + engine = sqlalchemy.create_engine( + self.get_connection_string(), + client_encoding='utf8', + poolclass=NullPool) self._register_engine_hooks(engine) return engine @@ -556,13 +556,14 @@ def _get_connection_string(self, database): extra_args = {} if driver == "psycopg2": extra_args = {'client_encoding': 'utf8'} - return str(URL('postgresql+' + driver, - username=self.user, - password=password, - host=self.host, - port=str(self.port), - database=database, - query=extra_args)) + return URL.create( + drivername='postgresql+' + driver, + username=self.user, + password=password, + host=self.host, + port=str(self.port), + database=database, + query=extra_args).render_as_string(hide_password=False) def connect(self, init=False): """ @@ -653,7 +654,9 @@ def connect(self, init=False): return self.check_schema() def get_connection_string(self) -> str: - return str(URL('sqlite+pysqlite', None, None, None, None, self.dbpath)) + return str(URL.create( + drivername='sqlite+pysqlite', + database=self.dbpath)) def get_db_location(self): return self.dbpath diff --git a/web/server/codechecker_server/database/run_db_model.py b/web/server/codechecker_server/database/run_db_model.py index c8eaec963f..27919bc47b 100644 --- a/web/server/codechecker_server/database/run_db_model.py +++ b/web/server/codechecker_server/database/run_db_model.py @@ -15,7 +15,7 @@ from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer, \ LargeBinary, MetaData, String, UniqueConstraint, Table, Text -from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import declarative_base from sqlalchemy.orm import relationship from sqlalchemy.sql.expression import true, false diff --git a/web/server/codechecker_server/migrations/config/env.py b/web/server/codechecker_server/migrations/config/env.py index 1493aab3b9..1e66f0961f 100644 --- a/web/server/codechecker_server/migrations/config/env.py +++ b/web/server/codechecker_server/migrations/config/env.py @@ -43,14 +43,7 @@ def run_migrations_online(): In this scenario we need to create an Engine and associate a connection with the context. 
""" - connectable = config.attributes.get('connection', None) - if connectable is None: - connectable = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - with connectable.connect() as connection: + def migrate(connection): context.configure( connection=connection, target_metadata=target_metadata @@ -61,6 +54,18 @@ def run_migrations_online(): with context.begin_transaction(): context.run_migrations() + connection = config.attributes.get('connection', None) + if connection: + migrate(connection) + else: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + with connectable.connect() as connection: + migrate(connection) + if context.is_offline_mode(): raise NotImplementedError(f"Offline '{schema}' migration is not possible!") diff --git a/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py b/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py index 9f15fb5e96..cff2adb232 100644 --- a/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py +++ b/web/server/codechecker_server/migrations/config/versions/126fa3f55e4b_added_permission_for_product_view.py @@ -34,17 +34,19 @@ def upgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute(f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}") + op.execute(sa.text( + f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}")) # Create the new enum. new_type.create(op.get_bind()) # # Alter detection status column. - op.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " - f"TYPE {type_name} USING {column_name}::text::{type_name}") + op.execute(sa.text( + f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " + f"TYPE {type_name} USING {column_name}::text::{type_name}")) # Drop the old enum. - op.execute(f"DROP TYPE {tmp_type_name}") + op.execute(sa.text(f"DROP TYPE {tmp_type_name}")) elif dialect == 'sqlite': with op.batch_alter_table(table_name) as batch_op: batch_op.alter_column( @@ -57,17 +59,19 @@ def downgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute(f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}") + op.execute(sa.text( + f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}")) # Create the new enum. old_type.create(op.get_bind()) # Alter detection status column. - op.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " - f"TYPE {type_name} USING {column_name}::text::{type_name}") + op.execute(sa.text( + f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " + f"TYPE {type_name} USING {column_name}::text::{type_name}")) # Drop the old enum. 
- op.execute(f"DROP TYPE {tmp_type_name}") + op.execute(sa.text(f"DROP TYPE {tmp_type_name}")) elif dialect == 'sqlite': with op.batch_alter_table('table_name') as batch_op: batch_op.alter_column( diff --git a/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py b/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py index caf4813059..298bcdae6c 100644 --- a/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py +++ b/web/server/codechecker_server/migrations/config/versions/4db450cf38af_add_extra_product_detail_columns.py @@ -37,7 +37,8 @@ def upgrade(): try: product_con = op.get_bind() products = product_con.execute( - "SELECT id, endpoint, connection FROM products").fetchall() + sa.text("SELECT id, endpoint, connection FROM products")) \ + .fetchall() context = webserver_context.get_context() for id_, endpoint, connection in products: @@ -47,18 +48,19 @@ def upgrade(): engine = sa.create_engine(sql_server.get_connection_string()) conn = engine.connect() - run_info = conn.execute("SELECT COUNT(*), MAX(date) FROM runs") \ + run_info = conn.execute( + sa.text("SELECT COUNT(*), MAX(date) FROM runs")) \ .fetchone() values = [f"num_of_runs={run_info[0]}"] if run_info[1]: values.append(f"latest_storage_date='{run_info[1]}'") - product_con.execute(f""" + product_con.execute(sa.text(f""" UPDATE products SET {', '.join(values)} WHERE id={id_} - """) + """)) except Exception as ex: LOG.error("Failed to fill product detail columns (num_of_runs, " "latest_storage_date): %s", ex) diff --git a/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py b/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py index 28895abc15..2b5a43afb6 100644 --- a/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py +++ b/web/server/codechecker_server/migrations/config/versions/7829789fc19c_global_permission_to_get_access_controls.py @@ -35,17 +35,19 @@ def upgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute(f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}") + op.execute(sa.text( + f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}")) # Create the new enum. new_type.create(op.get_bind()) # # Alter detection status column. - op.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " - f"TYPE {type_name} USING {column_name}::text::{type_name}") + op.execute(sa.text( + f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " + f"TYPE {type_name} USING {column_name}::text::{type_name}")) # Drop the old enum. - op.execute(f"DROP TYPE {tmp_type_name}") + op.execute(sa.text(f"DROP TYPE {tmp_type_name}")) elif dialect == 'sqlite': with op.batch_alter_table(table_name) as batch_op: batch_op.alter_column( @@ -58,17 +60,19 @@ def downgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute(f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}") + op.execute(sa.text( + f"ALTER TYPE {type_name} RENAME TO {tmp_type_name}")) # Create the new enum. old_type.create(op.get_bind()) # Alter detection status column. 
- op.execute(f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " - f"TYPE {type_name} USING {column_name}::text::{type_name}") + op.execute(sa.text( + f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " + f"TYPE {type_name} USING {column_name}::text::{type_name}")) # Drop the old enum. - op.execute(f"DROP TYPE {tmp_type_name}") + op.execute(sa.text(f"DROP TYPE {tmp_type_name}")) elif dialect == 'sqlite': with op.batch_alter_table('table_name') as batch_op: batch_op.alter_column( diff --git a/web/server/codechecker_server/migrations/config/versions/7ed50f8b3fb8_new_table_for_personal_access_tokens.py b/web/server/codechecker_server/migrations/config/versions/7ed50f8b3fb8_new_table_for_personal_access_tokens.py index 9d5280ab4c..92f65d2601 100644 --- a/web/server/codechecker_server/migrations/config/versions/7ed50f8b3fb8_new_table_for_personal_access_tokens.py +++ b/web/server/codechecker_server/migrations/config/versions/7ed50f8b3fb8_new_table_for_personal_access_tokens.py @@ -52,18 +52,18 @@ def upgrade(): one_year_later = datetime.now() + timedelta(days=365) op.execute( - f""" + sa.text(f""" INSERT INTO personal_access_tokens (user_name, token_name, token, description, last_access, expiration) SELECT user_name, {token_name}, token, description, last_access, '{one_year_later}' FROM auth_sessions WHERE can_expire = false - """) - op.execute(""" + """)) + op.execute(sa.text(""" DELETE FROM auth_sessions WHERE can_expire = false - """) + """)) if dialect == "sqlite": with op.batch_alter_table("auth_sessions", recreate="never") as ba: diff --git a/web/server/codechecker_server/migrations/config/versions/f59dfe4623fa_clear_legacy_web_sessions.py b/web/server/codechecker_server/migrations/config/versions/f59dfe4623fa_clear_legacy_web_sessions.py index e4a0785b46..4d60d83b00 100644 --- a/web/server/codechecker_server/migrations/config/versions/f59dfe4623fa_clear_legacy_web_sessions.py +++ b/web/server/codechecker_server/migrations/config/versions/f59dfe4623fa_clear_legacy_web_sessions.py @@ -20,7 +20,7 @@ def upgrade(): - op.execute("DELETE FROM auth_sessions WHERE can_expire") + op.execute(sa.text("DELETE FROM auth_sessions WHERE can_expire")) def downgrade(): diff --git a/web/server/codechecker_server/migrations/report/env.py b/web/server/codechecker_server/migrations/report/env.py index 9b5fcc58fd..56c0a78f7e 100644 --- a/web/server/codechecker_server/migrations/report/env.py +++ b/web/server/codechecker_server/migrations/report/env.py @@ -37,14 +37,7 @@ def run_migrations_online(): In this scenario we need to create an Engine and associate a connection with the context. 
""" - connectable = config.attributes.get('connection', None) - if connectable is None: - connectable = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - with connectable.connect() as connection: + def migrate(connection): context.configure( connection=connection, target_metadata=target_metadata @@ -55,6 +48,18 @@ def run_migrations_online(): with context.begin_transaction(): context.run_migrations() + connection = config.attributes.get('connection', None) + if connection: + migrate(connection) + else: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + with connectable.connect() as connection: + migrate(connection) + if context.is_offline_mode(): raise NotImplementedError(f"Offline '{schema}' migration is not possible!") diff --git a/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py b/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py index ea85cc35b5..ef60f3bd5c 100644 --- a/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py +++ b/web/server/codechecker_server/migrations/report/versions/3e91d0612422_off_and_unavailable_detection_statuses.py @@ -34,24 +34,25 @@ def upgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute('ALTER TYPE ' + name + ' RENAME TO ' + tmp_name) + op.execute(sa.text('ALTER TYPE ' + name + ' RENAME TO ' + tmp_name)) # Create the new enum. new_type.create(op.get_bind()) # Alter detection status column. - op.execute('ALTER TABLE ' + table_name + ' ALTER COLUMN ' + name + - ' TYPE ' + name + ' USING ' + name + '::text::' + name) + op.execute(sa.text( + 'ALTER TABLE ' + table_name + ' ALTER COLUMN ' + name + + ' TYPE ' + name + ' USING ' + name + '::text::' + name)) # Drop the old enum. - op.execute('DROP TYPE ' + tmp_name) + op.execute(sa.text('DROP TYPE ' + tmp_name)) elif dialect == 'sqlite': - op.execute('PRAGMA foreign_keys=off') + op.execute(sa.text('PRAGMA foreign_keys=off')) with op.batch_alter_table('reports') as batch_op: batch_op.alter_column(name, existing_type=old_type, type_=new_type) - op.execute('PRAGMA foreign_keys=on') + op.execute(sa.text('PRAGMA foreign_keys=on')) def downgrade(): @@ -60,21 +61,22 @@ def downgrade(): if dialect == 'postgresql': # Rename the enum type what we want to change. - op.execute('ALTER TYPE ' + name + ' RENAME TO ' + tmp_name) + op.execute(sa.text('ALTER TYPE ' + name + ' RENAME TO ' + tmp_name)) # Create the new enum. old_type.create(op.get_bind()) # Alter detection status column. - op.execute('ALTER TABLE ' + table_name + ' ALTER COLUMN ' + name + - ' TYPE ' + name + ' USING ' + name + '::text::' + name) + op.execute(sa.text( + 'ALTER TABLE ' + table_name + ' ALTER COLUMN ' + name + + ' TYPE ' + name + ' USING ' + name + '::text::' + name)) # Drop the old enum. 
- op.execute('DROP TYPE ' + tmp_name) + op.execute(sa.text('DROP TYPE ' + tmp_name)) elif dialect == 'sqlite': - op.execute('PRAGMA foreign_keys=off') + op.execute(sa.text('PRAGMA foreign_keys=off')) with op.batch_alter_table('reports') as batch_op: batch_op.alter_column(name, existing_type=new_type, type_=old_type) - op.execute('PRAGMA foreign_keys=on') + op.execute(sa.text('PRAGMA foreign_keys=on')) diff --git a/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py b/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py index 7d6fc4afdb..7ee2c0ea10 100644 --- a/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py +++ b/web/server/codechecker_server/migrations/report/versions/75ae226b5d88_review_status_for_each_report.py @@ -63,7 +63,7 @@ def decode_file_content(content): # reaches LTS maturity (Ubuntu 20.04 LTS comes with 3.31.0, raising # a syntax error on the "FROM" in the "UPDATE" query), this # branching here needs to stay. - conn.execute(""" + conn.execute(sa.text(""" UPDATE reports SET (review_status, review_status_author, @@ -72,7 +72,7 @@ def decode_file_content(content): (SELECT status, author, date, message FROM review_statuses WHERE bug_hash = reports.bug_id) - """) + """)) elif dialect == 'postgresql': op.add_column('reports', col_rs) op.add_column('reports', col_rs_author) @@ -80,7 +80,7 @@ def decode_file_content(content): op.add_column('reports', col_rs_is_in_source) op.add_column('reports', col_rs_message) - conn.execute(""" + conn.execute(sa.text(""" UPDATE reports SET review_status = rs.status, review_status_author = rs.author, @@ -88,19 +88,19 @@ def decode_file_content(content): review_status_message = rs.message FROM review_statuses AS rs WHERE bug_id = rs.bug_hash - """) + """)) - conn.execute(""" + conn.execute(sa.text(""" UPDATE reports SET review_status = 'unreviewed' WHERE review_status IS NULL - """) + """)) - files_with_report = conn.execute(""" + files_with_report = conn.execute(sa.text(""" SELECT DISTINCT reports.file_id, files.content_hash FROM reports INNER JOIN files ON reports.file_id = files.id WHERE review_status != 'unreviewed' - """) + """)) content_hashes = set() hash_to_content = {} @@ -111,19 +111,19 @@ def decode_file_content(content): file_id_to_content_hash[f.file_id] = f.content_hash if content_hashes: - hash_to_content = conn.execute(f""" + hash_to_content = conn.execute(sa.text(f""" SELECT content_hash, content FROM file_contents WHERE content_hash IN ({','.join(content_hashes)}) - """) + """)) hash_to_content = { x.content_hash: decode_file_content(x.content) for x in hash_to_content} - report_id_to_line = conn.execute(f""" + report_id_to_line = conn.execute(sa.text(f""" SELECT id, file_id, bug_id, checker_id, line FROM reports WHERE review_status != 'unreviewed' - """) + """)) scch = SourceCodeCommentHandler() comment_cache = {} @@ -150,47 +150,47 @@ def decode_file_content(content): review_status_to_report_ids[comment.status].add(row.id) if review_status_in_source: - conn.execute(f""" + conn.execute(sa.text(f""" UPDATE reports SET review_status_is_in_source = '1' WHERE id IN ({','.join(map(str, review_status_in_source))}) - """) + """)) # Earlier a common review status belonged to all reports sharing the same # bug hash even if these reports had different review status given in # source code comment. Now these are set individually. 
for review_status, report_ids in review_status_to_report_ids.items(): - conn.execute(f""" + conn.execute(sa.text(f""" UPDATE reports SET review_status = '{review_status}' WHERE id IN ({','.join(map(str, report_ids))}) - """) + """)) - results = conn.execute(""" + results = conn.execute(sa.text(""" SELECT bug_hash, date FROM review_statuses WHERE status IN ('false_positive', 'intentional') - """) + """)) for row in results: if dialect == 'sqlite': - conn.execute(f""" + conn.execute(sa.text(f""" UPDATE reports SET fixed_at = max('{row.date}', detected_at) WHERE fixed_at IS NULL AND bug_id = '{row.bug_hash}' - """) + """)) elif dialect == 'postgresql': - conn.execute(f""" + conn.execute(sa.text(f""" UPDATE reports SET fixed_at = greatest('{row.date}', detected_at)::timestamp WHERE fixed_at IS NULL AND bug_id = '{row.bug_hash}' - """) + """)) if bug_hashes: - conn.execute(f""" + conn.execute(sa.text(f""" DELETE FROM review_statuses WHERE bug_hash IN ({','.join(bug_hashes)}) - """) + """)) def downgrade(): diff --git a/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py b/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py index 4fd88ef84e..82cbe74ca5 100644 --- a/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py +++ b/web/server/codechecker_server/migrations/report/versions/c3dad71f8e6b_store_information_about_enabled_and_disabled_checkers_for_a_run.py @@ -45,18 +45,18 @@ def normalise_report_analyzer_and_checker_names(): # These values are normalised such that in the following, when the # foreign key-based look-up is added to the schema, their new # 'checker_id' will all point to the single "UNKNOWN/NOT FOUND" case. - analyzer_name_affected = conn.execute(f""" + analyzer_name_affected = conn.execute(sa.text(f""" UPDATE reports SET analyzer_name = '{UnknownChecker[0]}' WHERE analyzer_name = '' OR LOWER(analyzer_name) = 'unknown' ; - """).rowcount + """)).rowcount if analyzer_name_affected: LOG.info("Normalising 'reports'... %d unknown 'analyzer_name'.", analyzer_name_affected) - checker_id_affected = conn.execute(f""" + checker_id_affected = conn.execute(sa.text(f""" UPDATE reports SET checker_id = '{UnknownChecker[1]}' WHERE checker_id IS NULL @@ -64,7 +64,7 @@ def normalise_report_analyzer_and_checker_names(): OR LOWER(checker_id) = 'not found' OR LOWER(checker_id) = 'unknown' ; - """).rowcount + """)).rowcount if checker_id_affected: LOG.info("Normalising 'reports'... %d unknown 'checker_id'.", checker_id_affected) @@ -85,8 +85,8 @@ def normalise_report_analyzer_and_checker_names(): # this while keeping the entire migration in one transaction. # However, these changes are not destructive because no data is # lost, only the representation slightly changed. 
- conn.execute("COMMIT;") - conn.execute("START TRANSACTION;") + conn.execute(sa.text("COMMIT;")) + conn.execute(sa.text("START TRANSACTION;")) def create_new_tables(): op.create_table( diff --git a/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py b/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py index c2e516d023..dde279562f 100644 --- a/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py +++ b/web/server/codechecker_server/migrations/report/versions/dabc6998b8f0_analysis_info_table.py @@ -65,11 +65,11 @@ def upgrade(): ) try: - run_histories = conn.execute(""" + run_histories = conn.execute(sa.text(""" SELECT id, run_id, check_command FROM run_histories ORDER BY id DESC - """).fetchall() + """)).fetchall() uniqued_analysis_info = {} run_analysis_info = {} diff --git a/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py b/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py index 8eab7c2c90..5434895ad2 100644 --- a/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py +++ b/web/server/codechecker_server/migrations/report/versions/e89887e7d3f0_add_bug_path_length.py @@ -24,13 +24,13 @@ def upgrade(): conn = op.get_bind() - conn.execute(""" + conn.execute(sa.text(""" UPDATE reports SET path_length = (SELECT COUNT(bug_path_events.report_id) FROM bug_path_events WHERE bug_path_events.report_id = reports.id) - """) + """)) def downgrade(): diff --git a/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py b/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py index 78138e3d0d..cabfba1531 100644 --- a/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py +++ b/web/server/codechecker_server/migrations/report/versions/f8291ab1d6be_fix_setting_analysis_info_id_seq.py @@ -7,6 +7,7 @@ """ from alembic import op +import sqlalchemy as sa # Revision identifiers, used by Alembic. 
@@ -21,12 +22,12 @@ def upgrade(): dialect = ctx.dialect.name if dialect == 'postgresql': - op.execute(""" + op.execute(sa.text(""" SELECT SETVAL( 'analysis_info_id_seq', (SELECT MAX(id) + 1 FROM analysis_info) ) - """) + """)) def downgrade(): diff --git a/web/server/codechecker_server/product.py b/web/server/codechecker_server/product.py index 3c18de4339..d746146a9a 100644 --- a/web/server/codechecker_server/product.py +++ b/web/server/codechecker_server/product.py @@ -14,6 +14,7 @@ from datetime import datetime from typing import Optional +from sqlalchemy import text from sqlalchemy.orm import sessionmaker from codechecker_api_shared.ttypes import DBStatus @@ -140,7 +141,8 @@ def connect(self, init_db=False): self.__session = sessionmaker(bind=self.__engine) - self.__engine.execute('SELECT 1') + with self.__engine.connect() as connection: + connection.execute(text('SELECT 1')) self.__db_status = sql_server.check_schema() self.__last_connect_attempt = None diff --git a/web/server/codechecker_server/server.py b/web/server/codechecker_server/server.py index 74eccd1ea4..739f5f7fd1 100644 --- a/web/server/codechecker_server/server.py +++ b/web/server/codechecker_server/server.py @@ -874,13 +874,15 @@ def is_database_used(self, conn): """ # get the database name from the database connection args - conn = make_url(conn.connection) - is_sqlite = conn.engine == 'sqlite' + driver = \ + 'pysqlite' if conn.connection.engine == 'sqlite' else 'psycopg2' # create a tuple of database that is going to be added for comparison - to_add = (f"{conn.engine}+pysqlite" if is_sqlite - else f"{conn.engine}+psycopg2", - conn.database, conn.host, conn.port) + to_add = ( + f"{conn.connection.engine}+{driver}", + conn.connection.database, + conn.connection.host, + conn.connection.port) # create a tuple of database that is already connected for comparison def to_tuple(product): diff --git a/web/server/codechecker_server/task_executors/task_manager.py b/web/server/codechecker_server/task_executors/task_manager.py index 2f53664f74..4db887b224 100644 --- a/web/server/codechecker_server/task_executors/task_manager.py +++ b/web/server/codechecker_server/task_executors/task_manager.py @@ -180,8 +180,7 @@ def get_task_record(self, token: str) -> DBTask: This class should not be mutated, only the fields queried. """ with DBSession(self._database_factory) as session: - db_task: Optional[DBTask] = \ - session.query(DBTask).get(token) + db_task: Optional[DBTask] = session.get(DBTask, token) if not db_task: raise KeyError(f"No task record for token '{token}' " "in the database") @@ -203,8 +202,7 @@ def _mutate_task_record(self, task_obj: "AbstractTask", corresponding to the `task_obj` description available in memory. """ with DBSession(self._database_factory) as session: - db_task: Optional[DBTask] = \ - session.query(DBTask).get(task_obj.token) + db_task: Optional[DBTask] = session.get(DBTask, task_obj.token) if not db_task: raise KeyError(f"No task record for token '{task_obj.token}' " "in the database")
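
Taken together, the hunks above move the server onto the SQLAlchemy 2.0 API surface: session.query(X).get(pk) becomes session.get(X, pk), raw SQL strings are wrapped in text(), select() and case() take their arguments positionally instead of in a list, engine URLs are built with URL.create(), and declarative_base is imported from sqlalchemy.orm. The following is a minimal, self-contained sketch of those idioms only; the Product model and the in-memory SQLite engine are illustrative stand-ins, not the real CodeChecker schema.

# Sketch of the SQLAlchemy 2.0 idioms adopted by this patch (session.get(),
# text(), positional select(), URL.create()). "Product" is a stand-in model.
from sqlalchemy import Column, Integer, String, create_engine, select, text
from sqlalchemy.engine import URL
from sqlalchemy.orm import Session, declarative_base  # moved out of sqlalchemy.ext.declarative

Base = declarative_base()


class Product(Base):
    __tablename__ = "products"
    id = Column(Integer, primary_key=True)
    endpoint = Column(String)


# Engine URLs are constructed with URL.create() instead of calling URL() directly.
url = URL.create(drivername="sqlite+pysqlite", database=":memory:")
engine = create_engine(url)
Base.metadata.create_all(engine)

with engine.connect() as conn:
    # Connection.execute() no longer accepts plain strings; wrap raw SQL in text().
    conn.execute(text("SELECT 1"))

with Session(engine) as session:
    session.add(Product(id=1, endpoint="default"))
    session.commit()

    # Replaces the removed Query.get(): session.query(Product).get(1).
    product = session.get(Product, 1)

    # select() takes columns positionally; the old select([...]) list form is gone.
    endpoint = session.execute(
        select(Product.endpoint).where(Product.id == 1)).scalar_one()

    assert product is not None and endpoint == "default"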