diff --git a/lib/rucio/common/types.py b/lib/rucio/common/types.py index 818d761428..3991fd8a82 100644 --- a/lib/rucio/common/types.py +++ b/lib/rucio/common/types.py @@ -166,11 +166,11 @@ class RuleDict(TypedDict): copies: int rse_expression: str grouping: Literal['ALL', 'DATASET', 'NONE'] - weight: str - lifetime: int + weight: Optional[str] + lifetime: Optional[int] locked: bool - subscription_id: str + subscription_id: Optional[str] source_replica_expression: Optional[str] activity: str - notify: Optional[Literal['Y', 'N', 'C']] + notify: Optional[Literal['Y', 'N', 'C', 'P']] purge_replicas: bool diff --git a/lib/rucio/common/utils.py b/lib/rucio/common/utils.py index ef17f7e50e..9e3771e07a 100644 --- a/lib/rucio/common/utils.py +++ b/lib/rucio/common/utils.py @@ -796,6 +796,22 @@ def __strip_dsn(dsn: str) -> str: stripped_dsn = '.'.join(fields) return stripped_dsn + @staticmethod + def __strip_tag(tag: str) -> str: + """ + Drop the _sub and _dis suffixes for panda datasets from the lfc path + they will be registered in + Method imported from DQ2. 
+ """ + suffixes_to_drop = ['_dis', '_sub', '_tid'] + stripped_tag = tag + try: + for suffix in suffixes_to_drop: + stripped_tag = re.sub('%s.*$' % suffix, '', stripped_tag) + except IndexError: + return stripped_tag + return stripped_tag + @staticmethod def construct_surl_DQ2(dsn: str, scope: str, filename: str) -> str: """ @@ -831,7 +847,7 @@ def construct_surl_DQ2(dsn: str, scope: str, filename: str) -> str: if nfields == 5: tag = 'other' else: - tag = __strip_tag(fields[-1]) + tag = SurlAlgorithms.__strip_tag(fields[-1]) stripped_dsn = SurlAlgorithms.__strip_dsn(dsn) return '/%s/%s/%s/%s/%s' % (project, dataset_type, tag, stripped_dsn, filename) @@ -890,22 +906,6 @@ def construct_surl(dsn: str, scope: str, filename: str, naming_convention: str = return surl_algorithms.construct_surl(dsn, scope, filename, naming_convention) -def __strip_tag(tag): - """ - Drop the _sub and _dis suffixes for panda datasets from the lfc path - they will be registered in - Method imported from DQ2. - """ - suffixes_to_drop = ['_dis', '_sub', '_tid'] - stripped_tag = tag - try: - for suffix in suffixes_to_drop: - stripped_tag = re.sub('%s.*$' % suffix, '', stripped_tag) - except IndexError: - return stripped_tag - return stripped_tag - - def clean_surls(surls): res = [] for surl in surls: diff --git a/lib/rucio/core/rule.py b/lib/rucio/core/rule.py index ecd39882a1..88603bff4a 100644 --- a/lib/rucio/core/rule.py +++ b/lib/rucio/core/rule.py @@ -143,10 +143,33 @@ def default(rule: models.ReplicationRule, did: models.DataIdentifier, session: ' @transactional_session -def add_rule(dids: Sequence[dict[str, Any]], account: InternalAccount, copies: int, rse_expression: str, grouping: Literal['ALL', 'DATASET', 'NONE'], weight: str, lifetime: int, locked: bool, subscription_id: str, - source_replica_expression: Optional[str] = None, activity: str = 'User Subscriptions', notify: Optional[Literal['Y', 'N', 'C']] = None, purge_replicas: bool = False, - ignore_availability: bool = False, 
comment: Optional[str] = None, ask_approval: bool = False, asynchronous: bool = False, ignore_account_limit: bool = False, - priority: int = 3, delay_injection: Optional[int] = None, split_container: bool = False, meta: Optional[dict[str, Any]] = None, *, session: "Session", logger: LoggerFunction = logging.log) -> list[str]: +def add_rule( + dids: Sequence[dict[str, Any]], + account: InternalAccount, + copies: int, + rse_expression: str, + grouping: Literal['ALL', 'DATASET', 'NONE'], + weight: Optional[str], + lifetime: Optional[int], + locked: bool, + subscription_id: Optional[str], + source_replica_expression: Optional[str] = None, + activity: str = 'User Subscriptions', + notify: Optional[Literal['Y', 'N', 'C', 'P']] = None, + purge_replicas: bool = False, + ignore_availability: bool = False, + comment: Optional[str] = None, + ask_approval: bool = False, + asynchronous: bool = False, + ignore_account_limit: bool = False, + priority: int = 3, + delay_injection: Optional[int] = None, + split_container: bool = False, + meta: Optional[dict[str, Any]] = None, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> list[str]: """ Adds a replication rule for every did in dids @@ -231,7 +254,7 @@ def add_rule(dids: Sequence[dict[str, Any]], account: InternalAccount, copies: i expires_at = datetime.utcnow() + timedelta(seconds=lifetime) if lifetime is not None else None - notify_value = {'Y': RuleNotification.YES, 'C': RuleNotification.CLOSE, 'P': RuleNotification.PROGRESS}.get(str(notify or ''), RuleNotification.NO) + notify_value = {'Y': RuleNotification.YES, 'C': RuleNotification.CLOSE, 'P': RuleNotification.PROGRESS}.get(notify or '', RuleNotification.NO) for elem in dids: # 3. 
Get the did @@ -434,7 +457,13 @@ def add_rule(dids: Sequence[dict[str, Any]], account: InternalAccount, copies: i @transactional_session -def add_rules(dids: Sequence[dict[str, Any]], rules: Sequence[RuleDict], *, session: "Session", logger: LoggerFunction = logging.log) -> dict[tuple[InternalScope, str], list[str]]: +def add_rules( + dids: Sequence[dict[str, Any]], + rules: Sequence[RuleDict], + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> dict[tuple[InternalScope, str], list[str]]: """ Adds a list of replication rules to every did in dids @@ -592,7 +621,8 @@ def add_rules(dids: Sequence[dict[str, Any]], rules: Sequence[RuleDict], *, sess with METRICS.timer('add_rules.create_rule'): grouping = {'ALL': RuleGrouping.ALL, 'NONE': RuleGrouping.NONE}.get(str(rule.get('grouping')), RuleGrouping.DATASET) - expires_at: Optional[datetime] = datetime.utcnow() + timedelta(seconds=rule.get('lifetime')) if rule.get('lifetime') is not None else None + rule_lifetime: Optional[int] = rule.get('lifetime') + expires_at: Optional[datetime] = datetime.utcnow() + timedelta(seconds=rule_lifetime) if rule_lifetime is not None else None notify = {'Y': RuleNotification.YES, 'C': RuleNotification.CLOSE, 'P': RuleNotification.PROGRESS, None: RuleNotification.NO}.get(rule.get('notify')) @@ -738,7 +768,12 @@ def add_rules(dids: Sequence[dict[str, Any]], rules: Sequence[RuleDict], *, sess @transactional_session -def inject_rule(rule_id: str, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def inject_rule( + rule_id: str, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Inject a replication rule. 
@@ -896,7 +931,11 @@ def inject_rule(rule_id: str, *, session: "Session", logger: LoggerFunction = lo @stream_session -def list_rules(filters: dict[str, Any] = {}, *, session: "Session") -> Iterator[dict]: +def list_rules( + filters: Optional[dict[str, Any]] = None, + *, + session: "Session" +) -> Iterator[dict[str, Any]]: """ List replication rules. @@ -915,7 +954,7 @@ def list_rules(filters: dict[str, Any] = {}, *, session: "Session") -> Iterator[ models.ReplicationRule.name == models.DataIdentifier.name ) ) - if filters: + if filters is not None: for (key, value) in filters.items(): if key in ['account', 'scope']: if '*' in value.internal: @@ -959,7 +998,11 @@ def list_rules(filters: dict[str, Any] = {}, *, session: "Session") -> Iterator[ @stream_session -def list_rule_history(rule_id: str, *, session: "Session") -> Iterator[dict[str, Any]]: +def list_rule_history( + rule_id: str, + *, + session: "Session" +) -> Iterator[dict[str, Any]]: """ List the rule history of a rule. @@ -988,7 +1031,12 @@ def list_rule_history(rule_id: str, *, session: "Session") -> Iterator[dict[str, @stream_session -def list_rule_full_history(scope: InternalScope, name: str, *, session: "Session") -> Iterator[dict[str, Any]]: +def list_rule_full_history( + scope: InternalScope, + name: str, + *, + session: "Session" +) -> Iterator[dict[str, Any]]: """ List the rule history of a DID. @@ -1022,7 +1070,12 @@ def list_rule_full_history(scope: InternalScope, name: str, *, session: "Session @stream_session -def list_associated_rules_for_file(scope: InternalScope, name: str, *, session: "Session") -> Iterator[dict[str, Any]]: +def list_associated_rules_for_file( + scope: InternalScope, + name: str, + *, + session: "Session" +) -> Iterator[dict[str, Any]]: """ List replication rules a file is affected from. 
@@ -1054,8 +1107,16 @@ def list_associated_rules_for_file(scope: InternalScope, name: str, *, session: @transactional_session -def delete_rule(rule_id: str, purge_replicas: Optional[bool] = None, soft: bool = False, delete_parent: bool = False, nowait: bool = False, *, session: "Session", - ignore_rule_lock: bool = False) -> None: +def delete_rule( + rule_id: str, + purge_replicas: Optional[bool] = None, + soft: bool = False, + delete_parent: bool = False, + nowait: bool = False, + *, + session: "Session", + ignore_rule_lock: bool = False +) -> None: """ Delete a replication rule. @@ -1154,7 +1215,12 @@ def delete_rule(rule_id: str, purge_replicas: Optional[bool] = None, soft: bool @transactional_session -def repair_rule(rule_id: str, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def repair_rule( + rule_id: str, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Repair a STUCK replication rule. @@ -1463,7 +1529,11 @@ def repair_rule(rule_id: str, *, session: "Session", logger: LoggerFunction = lo @read_session -def get_rule(rule_id: str, *, session: "Session") -> dict[str, Any]: +def get_rule( + rule_id: str, + *, + session: "Session" +) -> dict[str, Any]: """ Get a specific replication rule. @@ -1487,7 +1557,12 @@ def get_rule(rule_id: str, *, session: "Session") -> dict[str, Any]: @transactional_session -def update_rule(rule_id: str, options: dict[str, Any], *, session: "Session") -> None: +def update_rule( + rule_id: str, + options: dict[str, Any], + *, + session: "Session" +) -> None: """ Update a rules options. 
@@ -1789,7 +1864,13 @@ def update_rule(rule_id: str, options: dict[str, Any], *, session: "Session") -> @transactional_session -def reduce_rule(rule_id: str, copies: int, exclude_expression: Optional[str] = None, *, session: "Session") -> str: +def reduce_rule( + rule_id: str, + copies: int, + exclude_expression: Optional[str] = None, + *, + session: "Session" +) -> str: """ Reduce the number of copies for a rule by atomically replacing the rule. @@ -1865,7 +1946,13 @@ def reduce_rule(rule_id: str, copies: int, exclude_expression: Optional[str] = N @transactional_session -def move_rule(rule_id: str, rse_expression: str, override: Optional[dict[str, Any]] = None, *, session: "Session") -> str: +def move_rule( + rule_id: str, + rse_expression: str, + override: Optional[dict[str, Any]] = None, + *, + session: "Session" +) -> str: """ Move a replication rule to another RSE and, once done, delete the original one. @@ -1939,7 +2026,13 @@ def move_rule(rule_id: str, rse_expression: str, override: Optional[dict[str, An @transactional_session -def re_evaluate_did(scope: InternalScope, name: str, rule_evaluation_action: DIDReEvaluation, *, session: "Session") -> None: +def re_evaluate_did( + scope: InternalScope, + name: str, + rule_evaluation_action: DIDReEvaluation, + *, + session: "Session" +) -> None: """ Re-Evaluates a did. @@ -1989,7 +2082,14 @@ def re_evaluate_did(scope: InternalScope, name: str, rule_evaluation_action: DID @read_session -def get_updated_dids(total_workers: int, worker_number: int, limit: int = 100, blocked_dids: Iterable[tuple[str, str]] = [], *, session: "Session") -> list[tuple[str, str]]: +def get_updated_dids( + total_workers: int, + worker_number: int, + limit: int = 100, + blocked_dids: Iterable[tuple[str, str]] = [], + *, + session: "Session" +) -> list[tuple[str, InternalScope, str, DIDReEvaluation]]: """ Get updated dids. 
@@ -2024,11 +2124,23 @@ def get_updated_dids(total_workers: int, worker_number: int, limit: int = 100, b else: return filtered_dids else: - return [did for did in session.execute(stmt.order_by(models.UpdatedDID.created_at)).all() if (did.scope, did.name) not in blocked_dids] # type: ignore + return [did._tuple() for did in session.execute(stmt.order_by(models.UpdatedDID.created_at)).all() if (did.scope, did.name) not in blocked_dids] @read_session -def get_rules_beyond_eol(date_check: datetime, worker_number: int, total_workers: int, *, session: "Session") -> list[dict[str, Any]]: +def get_rules_beyond_eol( + date_check: datetime, + worker_number: int, + total_workers: int, *, + session: "Session" +) -> list[tuple[InternalScope, + str, + str, + bool, + str, + Optional[datetime], + Optional[datetime], + InternalAccount]]: """ Get rules which have eol_at before a certain date. @@ -2051,11 +2163,18 @@ def get_rules_beyond_eol(date_check: datetime, worker_number: int, total_workers ) stmt = filter_thread_work(session=session, query=stmt, total_threads=total_workers, thread_id=worker_number, hash_variable='name') - return session.execute(stmt).all() # type: ignore + return [row._tuple() for row in session.execute(stmt).all()] @read_session -def get_expired_rules(total_workers: int, worker_number: int, limit: int = 100, blocked_rules: Sequence[str] = [], *, session: "Session") -> list[tuple[str, str]]: +def get_expired_rules( + total_workers: int, + worker_number: int, + limit: int = 100, + blocked_rules: Sequence[str] = [], + *, + session: "Session" +) -> list[tuple[str, str]]: """ Get expired rules. 
@@ -2083,7 +2202,7 @@ def get_expired_rules(total_workers: int, worker_number: int, limit: int = 100, if limit: stmt = stmt.limit(limit) result = session.execute(stmt).all() - filtered_rules = [rule for rule in result if rule.id not in blocked_rules] + filtered_rules = [rule._tuple() for rule in result if rule.id not in blocked_rules] if len(result) == limit and not filtered_rules: return get_expired_rules(total_workers=total_workers, worker_number=worker_number, @@ -2091,13 +2210,20 @@ def get_expired_rules(total_workers: int, worker_number: int, limit: int = 100, blocked_rules=blocked_rules, session=session) else: - return filtered_rules # type: ignore + return filtered_rules else: - return [rule for rule in session.execute(stmt).all() if rule.id not in blocked_rules] # type: ignore + return [rule._tuple() for rule in session.execute(stmt).all() if rule.id not in blocked_rules] @read_session -def get_injected_rules(total_workers: int, worker_number: int, limit: int = 100, blocked_rules: Sequence[str] = [], *, session: "Session") -> list[tuple[str, str]]: +def get_injected_rules( + total_workers: int, + worker_number: int, + limit: int = 100, + blocked_rules: Sequence[str] = [], + *, + session: "Session" +) -> list[str]: """ Get rules to be injected. 
@@ -2123,7 +2249,7 @@ def get_injected_rules(total_workers: int, worker_number: int, limit: int = 100, if limit: stmt = stmt.limit(limit) result = session.execute(stmt).all() - filtered_rules = [rule for rule in result if rule.id not in blocked_rules] + filtered_rules = [rule._tuple() for rule in result if rule.id not in blocked_rules] if len(result) == limit and not filtered_rules: return get_injected_rules(total_workers=total_workers, worker_number=worker_number, @@ -2131,13 +2257,21 @@ def get_injected_rules(total_workers: int, worker_number: int, limit: int = 100, blocked_rules=blocked_rules, session=session) else: - return filtered_rules # type: ignore + return filtered_rules else: - return [rule for rule in session.execute(stmt).all() if rule.id not in blocked_rules] # type: ignore + return [rule._tuple() for rule in session.execute(stmt).all() if rule.id not in blocked_rules] @read_session -def get_stuck_rules(total_workers: int, worker_number: int, delta: int = 600, limit: int = 10, blocked_rules: Sequence[str] = [], *, session: "Session") -> list[tuple[str, str]]: +def get_stuck_rules( + total_workers: int, + worker_number: int, + delta: int = 600, + limit: int = 10, + blocked_rules: Sequence[str] = [], + *, + session: "Session" +) -> list[str]: """ Get stuck rules. 
@@ -2166,7 +2300,7 @@ def get_stuck_rules(total_workers: int, worker_number: int, delta: int = 600, li if limit: stmt = stmt.limit(limit) result = session.execute(stmt).all() - filtered_rules = [rule for rule in result if rule.id not in blocked_rules] + filtered_rules = [rule._tuple() for rule in result if rule.id not in blocked_rules] if len(result) == limit and not filtered_rules: return get_stuck_rules(total_workers=total_workers, worker_number=worker_number, @@ -2175,13 +2309,17 @@ def get_stuck_rules(total_workers: int, worker_number: int, delta: int = 600, li blocked_rules=blocked_rules, session=session) else: - return filtered_rules # type: ignore + return filtered_rules else: - return [rule for rule in session.execute(stmt).all() if rule.id not in blocked_rules] # type: ignore + return [rule._tuple() for rule in session.execute(stmt).all() if rule.id not in blocked_rules] @transactional_session -def delete_updated_did(id_: str, *, session: "Session") -> None: +def delete_updated_did( + id_: str, + *, + session: "Session" +) -> None: """ Delete an updated_did by id. @@ -2197,7 +2335,15 @@ def delete_updated_did(id_: str, *, session: "Session") -> None: @transactional_session -def update_rules_for_lost_replica(scope: InternalScope, name: str, rse_id: str, nowait: bool = False, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def update_rules_for_lost_replica( + scope: InternalScope, + name: str, + rse_id: str, + nowait: bool = False, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Update rules if a file replica is lost. 
@@ -2340,7 +2486,15 @@ def update_rules_for_lost_replica(scope: InternalScope, name: str, rse_id: str, @transactional_session -def update_rules_for_bad_replica(scope: InternalScope, name: str, rse_id: str, nowait: bool = False, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def update_rules_for_bad_replica( + scope: InternalScope, + name: str, + rse_id: str, + nowait: bool = False, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Update rules if a file replica is bad and has to be recreated. @@ -2465,7 +2619,12 @@ def update_rules_for_bad_replica(scope: InternalScope, name: str, rse_id: str, n @transactional_session -def generate_rule_notifications(rule: models.ReplicationRule, replicating_locks_before: Optional[int] = None, *, session: "Session") -> None: +def generate_rule_notifications( + rule: models.ReplicationRule, + replicating_locks_before: Optional[int] = None, + *, + session: "Session" +) -> None: """ Generate (If necessary) a callback for a rule (DATASETLOCK_OK, RULE_OK, DATASETLOCK_PROGRESS) @@ -2583,7 +2742,12 @@ def generate_rule_notifications(rule: models.ReplicationRule, replicating_locks_ @transactional_session -def generate_email_for_rule_ok_notification(rule: models.ReplicationRule, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def generate_email_for_rule_ok_notification( + rule: models.ReplicationRule, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Generate (If necessary) an eMail for a rule with notification mode Y. 
@@ -2634,7 +2798,13 @@ def generate_email_for_rule_ok_notification(rule: models.ReplicationRule, *, ses @transactional_session -def insert_rule_history(rule: models.ReplicationRule, recent: bool = True, longterm: bool = False, *, session: "Session") -> None: +def insert_rule_history( + rule: models.ReplicationRule, + recent: bool = True, + longterm: bool = False, + *, + session: "Session" +) -> None: """ Insert rule history to recent/longterm history. @@ -2664,7 +2834,13 @@ def insert_rule_history(rule: models.ReplicationRule, recent: bool = True, longt @transactional_session -def approve_rule(rule_id: str, approver: Optional[str] = None, notify_approvers: bool = True, *, session: "Session") -> None: +def approve_rule( + rule_id: str, + approver: Optional[str] = None, + notify_approvers: bool = True, + *, + session: "Session" +) -> None: """ Approve a specific replication rule. @@ -2729,7 +2905,13 @@ def approve_rule(rule_id: str, approver: Optional[str] = None, notify_approvers: @transactional_session -def deny_rule(rule_id: str, approver: Optional[str] = None, reason: Optional[str] = None, *, session: "Session") -> None: +def deny_rule( + rule_id: str, + approver: Optional[str] = None, + reason: Optional[str] = None, + *, + session: "Session" +) -> None: """ Deny a specific replication rule. @@ -2793,7 +2975,11 @@ def deny_rule(rule_id: str, approver: Optional[str] = None, reason: Optional[str @transactional_session -def examine_rule(rule_id: str, *, session: "Session") -> dict[str, Any]: +def examine_rule( + rule_id: str, + *, + session: "Session" +) -> dict[str, Any]: """ Examine a replication rule for transfer errors. 
@@ -2877,7 +3063,11 @@ def examine_rule(rule_id: str, *, session: "Session") -> dict[str, Any]: @transactional_session -def get_evaluation_backlog(expiration_time: int = 600, *, session: "Session") -> tuple[int, datetime]: +def get_evaluation_backlog( + expiration_time: int = 600, + *, + session: "Session" +) -> tuple[int, datetime]: """ Counts the number of entries in the rule evaluation backlog. (Number of files to be evaluated) @@ -2891,14 +3081,19 @@ def get_evaluation_backlog(expiration_time: int = 600, *, session: "Session") -> func.count(models.UpdatedDID.created_at), func.min(models.UpdatedDID.created_at) ) - result = session.execute(stmt).scalars().one() + result = session.execute(stmt).one()._tuple() REGION.set('rule_evaluation_backlog', result) return result return cached_backlog @transactional_session -def release_parent_rule(child_rule_id: str, remove_parent_expiration: bool = False, *, session: "Session") -> None: +def release_parent_rule( + child_rule_id: str, + remove_parent_expiration: bool = False, + *, + session: "Session" +) -> None: """ Release a potential parent rule, because the child_rule is OK. 
@@ -2991,10 +3186,18 @@ def list_rules_for_rse_decommissioning( @transactional_session -def __find_missing_locks_and_create_them(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], Sequence[models.ReplicaLock]], - replicas: dict[tuple[str, str], Any], source_replicas: dict[tuple[str, str], Any], - rseselector: RSESelector, rule: models.ReplicationRule, source_rses: Sequence[str], *, - session: "Session", logger: LoggerFunction = logging.log) -> None: +def __find_missing_locks_and_create_them( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], Sequence[models.ReplicaLock]], + replicas: dict[tuple[InternalScope, str], Any], + source_replicas: dict[tuple[InternalScope, str], Any], + rseselector: RSESelector, + rule: models.ReplicationRule, + source_rses: Sequence[str], + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Find missing locks for a rule and create them. @@ -3041,7 +3244,14 @@ def __find_missing_locks_and_create_them(datasetfiles: Sequence[dict[str, Any]], @transactional_session -def __find_surplus_locks_and_remove_them(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], list[models.ReplicaLock]], rule: models.ReplicationRule, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def __find_surplus_locks_and_remove_them( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], list[models.ReplicaLock]], + rule: models.ReplicationRule, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Find surplocks locks for a rule and delete them. 
@@ -3085,10 +3295,18 @@ def __find_surplus_locks_and_remove_them(datasetfiles: Sequence[dict[str, Any]], @transactional_session -def __find_stuck_locks_and_repair_them(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], Sequence[models.ReplicaLock]], - replicas: dict[tuple[str, str], Any], source_replicas: dict[tuple[str, str], Any], - rseselector: RSESelector, rule: models.ReplicationRule, source_rses: Sequence[str], *, - session: "Session", logger: LoggerFunction = logging.log) -> None: +def __find_stuck_locks_and_repair_them( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], Sequence[models.ReplicaLock]], + replicas: dict[tuple[InternalScope, str], Any], + source_replicas: dict[tuple[InternalScope, str], Any], + rseselector: RSESelector, + rule: models.ReplicationRule, + source_rses: Sequence[str], + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Find stuck locks for a rule and repair them. @@ -3147,7 +3365,12 @@ def __find_stuck_locks_and_repair_them(datasetfiles: Sequence[dict[str, Any]], l @transactional_session -def __evaluate_did_detach(eval_did: models.DataIdentifier, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def __evaluate_did_detach( + eval_did: models.DataIdentifier, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Evaluate a parent did which has children removed. @@ -3268,7 +3491,12 @@ def __evaluate_did_detach(eval_did: models.DataIdentifier, *, session: "Session" @transactional_session -def __oldest_file_under(scope: InternalScope, name: str, *, session: "Session") -> Optional[tuple[InternalScope, str]]: +def __oldest_file_under( + scope: InternalScope, + name: str, + *, + session: "Session" +) -> Optional[tuple[InternalScope, str]]: """ Finds oldest file in oldest container/dataset in the container or the dataset, recursively. Oldest means attached to its parent first. 
@@ -3297,7 +3525,12 @@ def __oldest_file_under(scope: InternalScope, name: str, *, session: "Session") @transactional_session -def __evaluate_did_attach(eval_did: models.DataIdentifier, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def __evaluate_did_attach( + eval_did: models.DataIdentifier, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ Evaluate a parent did which has new childs @@ -3538,12 +3771,18 @@ def __evaluate_did_attach(eval_did: models.DataIdentifier, *, session: "Session" @transactional_session -def __resolve_did_to_locks_and_replicas(did: models.DataIdentifier, nowait: bool = False, restrict_rses: Optional[Sequence[str]] = None, - source_rses: Optional[Sequence[str]] = None, only_stuck: bool = False, *, - session: "Session") -> tuple[list[dict[str, Any]], - dict[tuple[str, str], models.ReplicaLock], - dict[tuple[str, str], Any], - dict[tuple[str, str], str]]: +def __resolve_did_to_locks_and_replicas( + did: models.DataIdentifier, + nowait: bool = False, + restrict_rses: Optional[Sequence[str]] = None, + source_rses: Optional[Sequence[str]] = None, + only_stuck: bool = False, + *, + session: "Session" +) -> tuple[list[dict[str, Any]], + dict[tuple[str, str], models.ReplicaLock], + dict[tuple[str, str], models.RSEFileAssociation], + dict[tuple[str, str], str]]: """ Resolves a did to its constituent childs and reads the locks and replicas of all the constituent files. 
@@ -3640,13 +3879,17 @@ def __resolve_did_to_locks_and_replicas(did: models.DataIdentifier, nowait: bool @transactional_session -def __resolve_dids_to_locks_and_replicas(dids: Sequence[models.DataIdentifierAssociation], - nowait: bool = False, restrict_rses: Sequence[str] = [], - source_rses: Optional[Sequence[str]] = None, *, - session: "Session") -> tuple[list[dict[str, Any]], - dict[tuple[str, str], models.ReplicaLock], - dict[tuple[str, str], Any], - dict[tuple[str, str], str]]: +def __resolve_dids_to_locks_and_replicas( + dids: Sequence[models.DataIdentifierAssociation], + nowait: bool = False, + restrict_rses: Sequence[str] = [], + source_rses: Optional[Sequence[str]] = None, + *, + session: "Session" +) -> tuple[list[dict[str, Any]], + dict[tuple[str, str], models.ReplicaLock], + dict[tuple[str, str], models.RSEFileAssociation], + dict[tuple[str, str], str]]: """ Resolves a list of dids to its constituent childs and reads the locks and replicas of all the constituent files. @@ -3805,10 +4048,19 @@ def __resolve_dids_to_locks_and_replicas(dids: Sequence[models.DataIdentifierAss @transactional_session -def __create_locks_replicas_transfers(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], Sequence[models.ReplicaLock]], - replicas: dict[tuple[str, str], Any], source_replicas: dict[tuple[str, str], Any], - rseselector: RSESelector, rule: models.ReplicationRule, preferred_rse_ids: Sequence[str] = [], - source_rses: Sequence[str] = [], *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def __create_locks_replicas_transfers( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], Sequence[models.ReplicaLock]], + replicas: dict[tuple[InternalScope, str], Any], + source_replicas: dict[tuple[InternalScope, str], Any], + rseselector: RSESelector, + rule: models.ReplicationRule, + preferred_rse_ids: Sequence[str] = [], + source_rses: Sequence[str] = [], + *, + session: "Session", + logger: LoggerFunction 
= logging.log +) -> None: """ Apply a created replication rule to a set of files @@ -3861,7 +4113,14 @@ def __create_locks_replicas_transfers(datasetfiles: Sequence[dict[str, Any]], lo @transactional_session -def __delete_lock_and_update_replica(lock: models.ReplicaLock, purge_replicas: bool = False, nowait: bool = False, *, session: "Session", logger: LoggerFunction = logging.log) -> bool: +def __delete_lock_and_update_replica( + lock: models.ReplicaLock, + purge_replicas: bool = False, + nowait: bool = False, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> bool: """ Delete a lock and update the associated replica. @@ -3906,7 +4165,11 @@ def __delete_lock_and_update_replica(lock: models.ReplicaLock, purge_replicas: b @transactional_session -def __create_rule_approval_email(rule: models.ReplicationRule, *, session: "Session") -> None: +def __create_rule_approval_email( + rule: models.ReplicationRule, + *, + session: "Session" +) -> None: """ Create the rule notification email. @@ -3992,7 +4255,12 @@ def __create_rule_approval_email(rule: models.ReplicationRule, *, session: "Sess @transactional_session -def _create_recipients_list(rse_expression: str, filter_: Optional[str] = None, *, session: "Session") -> list[tuple[str, InternalAccount]]: +def _create_recipients_list( + rse_expression: str, + filter_: Optional[str] = None, + *, + session: "Session" +) -> list[tuple[str, Union[str, InternalAccount]]]: """ Create a list of recipients for a notification email based on rse_expression. 
@@ -4085,7 +4353,13 @@ def __progress_class(replicating_locks, total_locks): @policy_filter @transactional_session -def archive_localgroupdisk_datasets(scope: InternalScope, name: str, *, session: "Session", logger: LoggerFunction = logging.log) -> None: +def archive_localgroupdisk_datasets( + scope: InternalScope, + name: str, + *, + session: "Session", + logger: LoggerFunction = logging.log +) -> None: """ ATLAS policy to archive a dataset which has a replica on LOCALGROUPDISK @@ -4153,7 +4427,13 @@ def archive_localgroupdisk_datasets(scope: InternalScope, name: str, *, session: @policy_filter @read_session -def get_scratch_policy(account: str, rses: Sequence[dict[str, Any]], lifetime: int, *, session: "Session") -> int: +def get_scratch_policy( + account: InternalAccount, + rses: Sequence[dict[str, Any]], + lifetime: Optional[int], + *, + session: "Session" +) -> Optional[int]: """ ATLAS policy for rules on SCRATCHDISK diff --git a/lib/rucio/core/rule_grouping.py b/lib/rucio/core/rule_grouping.py index 6b421e9a1e..9eca1381e8 100644 --- a/lib/rucio/core/rule_grouping.py +++ b/lib/rucio/core/rule_grouping.py @@ -26,6 +26,7 @@ import rucio.core.replica from rucio.common.config import config_get_int from rucio.common.exception import InsufficientTargetRSEs +from rucio.common.types import InternalScope from rucio.core import account_counter, rse_counter, request as request_core from rucio.core.rse_selector import RSESelector from rucio.core.rse import get_rse, get_rse_attribute, get_rse_name @@ -38,13 +39,19 @@ @transactional_session -def apply_rule_grouping(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], models.ReplicaLock], - replicas: dict[tuple[str, str], Any], source_replicas: dict[tuple[str, str], Any], - rseselector: RSESelector, rule: models.ReplicationRule, preferred_rse_ids: Sequence[str] = [], - source_rses: Sequence[str] = [], *, - session: "Session") -> tuple[dict[str, list[dict[str, models.RSEFileAssociation]]], - dict[str, 
list[dict[str, models.ReplicaLock]]], - list[dict[str, Any]]]: +def apply_rule_grouping( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], models.ReplicaLock], + replicas: dict[tuple[InternalScope, str], Any], + source_replicas: dict[tuple[InternalScope, str], Any], + rseselector: RSESelector, rule: models.ReplicationRule, + preferred_rse_ids: Sequence[str] = [], + source_rses: Sequence[str] = [], + *, + session: "Session" +) -> tuple[dict[str, list[dict[str, models.RSEFileAssociation]]], + dict[str, list[dict[str, models.ReplicaLock]]], + list[dict[str, Any]]]: """ Apply rule grouping to files. @@ -104,13 +111,19 @@ def apply_rule_grouping(datasetfiles: Sequence[dict[str, Any]], locks: dict[tupl @transactional_session -def repair_stuck_locks_and_apply_rule_grouping(datasetfiles: Sequence[dict[str, Any]], locks: dict[tuple[str, str], models.ReplicaLock], - replicas: dict[tuple[str, str], Any], source_replicas: dict[tuple[str, str], Any], - rseselector: RSESelector, rule: models.ReplicationRule, source_rses: Sequence[str], *, - session: "Session") -> tuple[dict[str, list[dict[str, models.RSEFileAssociation]]], - dict[str, list[dict[str, models.ReplicaLock]]], - list[dict[str, Any]], - dict[str, list[dict[str, models.ReplicaLock]]]]: +def repair_stuck_locks_and_apply_rule_grouping( + datasetfiles: Sequence[dict[str, Any]], + locks: dict[tuple[InternalScope, str], models.ReplicaLock], + replicas: dict[tuple[InternalScope, str], Any], + source_replicas: dict[tuple[InternalScope, str], Any], + rseselector: RSESelector, rule: models.ReplicationRule, + source_rses: Sequence[str], + *, + session: "Session" +) -> tuple[dict[str, list[dict[str, models.RSEFileAssociation]]], + dict[str, list[dict[str, models.ReplicaLock]]], + list[dict[str, Any]], + dict[str, list[dict[str, models.ReplicaLock]]]]: """ Apply rule grouping to files. 
diff --git a/lib/rucio/daemons/judge/injector.py b/lib/rucio/daemons/judge/injector.py index d6189ca6ef..91432a2822 100644 --- a/lib/rucio/daemons/judge/injector.py +++ b/lib/rucio/daemons/judge/injector.py @@ -85,9 +85,8 @@ def run_once(paused_rules, heartbeat_handler, **_kwargs): logger(logging.DEBUG, 'did not get any work (paused_rules=%s)' % str(len(paused_rules))) return - for rule in rules: + for rule_id in rules: _, _, logger = heartbeat_handler.live() - rule_id = rule[0] logger(logging.INFO, 'Injecting rule %s' % rule_id) if graceful_stop.is_set(): break diff --git a/lib/rucio/daemons/judge/repairer.py b/lib/rucio/daemons/judge/repairer.py index b797f202b0..26d3250ed2 100644 --- a/lib/rucio/daemons/judge/repairer.py +++ b/lib/rucio/daemons/judge/repairer.py @@ -92,7 +92,6 @@ def run_once(paused_rules, delta, heartbeat_handler, **_kwargs): for rule_id in rules: _, _, logger = heartbeat_handler.live() - rule_id = rule_id[0] logger(logging.INFO, 'Repairing rule %s' % (rule_id)) if graceful_stop.is_set(): break diff --git a/lib/rucio/vcsversion.py b/lib/rucio/vcsversion.py index 8c1cf22771..a2a702abf0 100644 --- a/lib/rucio/vcsversion.py +++ b/lib/rucio/vcsversion.py @@ -4,8 +4,8 @@ ''' VERSION_INFO = { 'final': True, - 'version': '33.0.0', + 'version': '34.0.0rc1', 'branch_nick': 'master', - 'revision_id': '0400523747c2c211473ed86e9644c7a6fca846ff', - 'revno': 12668 + 'revision_id': '5b11b2faf62100dd846195ae2317d2a5605a100f', + 'revno': 12735 } diff --git a/lib/rucio/web/rest/flaskapi/v1/auth.py b/lib/rucio/web/rest/flaskapi/v1/auth.py index a8e4f86e0a..b8ca7fbe7b 100644 --- a/lib/rucio/web/rest/flaskapi/v1/auth.py +++ b/lib/rucio/web/rest/flaskapi/v1/auth.py @@ -26,17 +26,16 @@ from rucio.api.authentication import get_auth_token_user_pass, get_auth_token_gss, get_auth_token_x509, \ get_auth_token_ssh, get_ssh_challenge_token, validate_auth_token, get_auth_oidc, redirect_auth_oidc, \ get_token_oidc, refresh_cli_auth_token, get_auth_token_saml -from 
rucio.api.identity import list_accounts_for_identity, get_default_account, verify_identity from rucio.common.config import config_get from rucio.common.exception import AccessDenied, IdentityError, IdentityNotFound, CannotAuthenticate, CannotAuthorize from rucio.common.extra import import_extras from rucio.common.utils import date_to_str from rucio.core.authentication import strip_x509_proxy_attributes from rucio.web.rest.flaskapi.v1.common import check_accept_header_wrapper_flask, error_headers, \ - extract_vo, generate_http_error_flask, ErrorHandlingMethodView + extract_vo, generate_http_error_flask, ErrorHandlingMethodView, get_account_from_verified_identity if TYPE_CHECKING: - from typing import Optional + from typing import Optional, Union from rucio.web.rest.flaskapi.v1.common import HeadersType EXTRA_MODULES = import_extras(['onelogin']) @@ -60,7 +59,7 @@ def get_headers(self) -> "Optional[HeadersType]": headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token, X-Rucio-Auth-Token-Expires, X-Rucio-Auth-Account, X-Rucio-Auth-Accounts' return headers - def options(self): + def options(self) -> tuple[str, int, "Optional[HeadersType]"]: """ --- summary: UserPass Allow cross-site scripting @@ -96,7 +95,7 @@ def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) - def get(self): + def get(self) -> 'Union[Response, tuple[str, int, "Optional[HeadersType]"]]': """ --- summary: UserPass @@ -185,41 +184,43 @@ def get(self): password = request.headers.get('X-Rucio-Password', default=None) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) - if not username or not password: return generate_http_error_flask(401, CannotAuthenticate.__name__, 'Cannot authenticate without passing all required arguments', headers=headers) + accounts: list[str] = [] if not account: - accounts = 
list_accounts_for_identity(identity_key=username, id_type='USERPASS') - if accounts is None or len(accounts) == 0: - try: - verify_identity(identity_key=username, id_type='USERPASS', password=password) - except IdentityNotFound: - return generate_http_error_flask(401, IdentityNotFound.__name__, 'Cannot authenticate. Username/Password pair does not exist.', headers=headers) - except IdentityError: - return generate_http_error_flask(401, IdentityError.__name__, 'Cannot authenticate. The identity does not exist.', headers=headers) - return generate_http_error_flask(401, CannotAuthenticate.__name__, 'Cannot authenticate with provided username or password. Identity is not mapped to any accounts.', headers=headers) - if len(accounts) > 1: - try: - account = get_default_account(identity_key=username, id_type='USERPASS') - except IdentityError: - headers['X-Rucio-Auth-Accounts'] = ','.join(accounts) - return json.dumps(accounts), 206, headers - else: - account = accounts[0] + try: + accounts = get_account_from_verified_identity(identity_key=username, id_type='USERPASS', password=password) + except IdentityNotFound: + return generate_http_error_flask(401, IdentityNotFound.__name__, 'Cannot authenticate. Username/Password pair does not exist.', headers=headers) + except IdentityError: + return generate_http_error_flask(401, IdentityError.__name__, 'Cannot authenticate. 
The identity does not exist.', headers=headers)
+        else:
+            accounts = [account]
+
+        if len(accounts) > 1:
+            account_names: list[str] = []
+            for account in accounts:
+                if isinstance(account, str):
+                    account_names.append(account)
+                else:
+                    account_names.append(account.external)
+            headers['X-Rucio-Auth-Accounts'] = ','.join(account_names)
+            return json.dumps(account_names), 206, headers
+
+        account = accounts[0]
         account_name = account if isinstance(account, str) else account.external
         try:
             result = get_auth_token_user_pass(account_name, username, password, appid, ip, vo=vo)
+            if not result:
+                return generate_http_error_flask(401, CannotAuthenticate.__name__, f'Cannot authenticate to account {account} with given credentials', headers=headers)
+            headers['X-Rucio-Auth-Account'] = account_name
+            headers['X-Rucio-Auth-Token'] = result['token']
+            headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at'])
+            return '', 200, headers
         except AccessDenied:
             return generate_http_error_flask(401, CannotAuthenticate.__name__, f'Cannot authenticate to account {account} with given credentials', headers=headers)
-        if not result:
-            return generate_http_error_flask(401, CannotAuthenticate.__name__, f'Cannot authenticate to account {account} with given credentials', headers=headers)
-        headers['X-Rucio-Auth-Account'] = account_name
-        headers['X-Rucio-Auth-Token'] = result['token']
-        headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at'])
-        return '', 200, headers
-

 class OIDC(ErrorHandlingMethodView):
     """
@@ -945,7 +946,7 @@ def get_headers(self) -> "Optional[HeadersType]":
         headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token, X-Rucio-Auth-Token-Expires, X-Rucio-Auth-Account, X-Rucio-Auth-Accounts'
         return headers

-    def options(self):
+    def options(self) -> tuple[str, int, "Optional[HeadersType]"]:
         """
         ---
         summary: x509 Allow cross-site scripting
@@ -980,7 +981,7 @@ def options(self):
         return '', 200, self.get_headers()
     @check_accept_header_wrapper_flask(['application/octet-stream'])
-    def get(self):
+    def get(self) -> 'Union[Response, tuple[str, int, "Optional[HeadersType]"]]':
         """
         ---
         summary: x509
@@ -1048,39 +1049,45 @@ def get(self):
         ip = request.headers.get('X-Forwarded-For', default=request.remote_addr)
         return_multiple_accounts = request.headers.get('X-Rucio-Allow-Return-Multiple-Accounts', default=None)

+        accounts: list[str] = []
+        if not account:
+            try:
+                accounts = get_account_from_verified_identity(identity_key=dn, id_type='X509')
+            except IdentityError as e:
+                return generate_http_error_flask(401, IdentityError.__name__, str(e), headers=headers)
+        else:
+            accounts = [account]
+
+        if len(accounts) > 1:
+            if return_multiple_accounts is None or return_multiple_accounts.lower() != 'true':
+                return generate_http_error_flask(401, CannotAuthenticate.__name__, 'Multiple accounts associated with the provided identity', headers=headers)
+            account_names: list[str] = []
+            for account in accounts:
+                if isinstance(account, str):
+                    account_names.append(account)
+                else:
+                    account_names.append(account.external)
+            headers['X-Rucio-Auth-Accounts'] = ','.join(account_names)
+            return json.dumps(account_names), 206, headers
+        account = accounts[0]
+        account_name = account if isinstance(account, str) else account.external
         result = None
         try:
-            result = get_auth_token_x509(account, dn, appid, ip, vo=vo)
+            result = get_auth_token_x509(account_name, dn, appid, ip, vo=vo)
         except AccessDenied:
             return generate_http_error_flask(
                 status_code=401,
                 exc=CannotAuthenticate.__name__,
-                exc_msg=f'Cannot authenticate to account {account} with given credentials',
+                exc_msg=f'Cannot authenticate to account {account_name} with given credentials',
+                headers=headers
+            )
+        except IdentityError as e:
+            return generate_http_error_flask(
+                status_code=401,
+                exc=CannotAuthenticate.__name__,
+                exc_msg=str(e),
                 headers=headers
             )
-        except IdentityError:
-            if not return_multiple_accounts:
-                return generate_http_error_flask(
status_code=401, - exc=CannotAuthenticate.__name__, - exc_msg=f'No default account set for {dn}', - headers=headers - ) - accounts = list_accounts_for_identity(identity_key=dn, id_type='X509') - if len(accounts) == 1: - account = accounts[0] - account_name = account if isinstance(account, str) else account.external - result = get_auth_token_x509(account_name, dn, appid, ip, vo=vo) - elif len(accounts) > 1: - headers['X-Rucio-Auth-Accounts'] = ','.join(accounts) - return json.dumps(accounts), 206, headers - else: - return generate_http_error_flask( - status_code=401, - exc=CannotAuthenticate.__name__, - exc_msg=f'No account set for {dn}', - headers=headers - ) if not result: return generate_http_error_flask( diff --git a/lib/rucio/web/rest/flaskapi/v1/common.py b/lib/rucio/web/rest/flaskapi/v1/common.py index 43684c6cf2..d629d8bb8a 100644 --- a/lib/rucio/web/rest/flaskapi/v1/common.py +++ b/lib/rucio/web/rest/flaskapi/v1/common.py @@ -29,15 +29,16 @@ from werkzeug.wrappers import Request, Response from rucio.api.authentication import validate_auth_token +from rucio.api.identity import list_accounts_for_identity, get_default_account, verify_identity from rucio.common import config -from rucio.common.exception import DatabaseException, RucioException, CannotAuthenticate, UnsupportedRequestedContentType +from rucio.common.exception import DatabaseException, IdentityError, RucioException, CannotAuthenticate, UnsupportedRequestedContentType from rucio.common.schema import get_schema_value from rucio.common.utils import generate_uuid, render_json from rucio.core.vo import map_vo if TYPE_CHECKING: from collections.abc import Callable, Iterable, Sequence - from typing import Optional, Union, Any + from typing import Optional, Union, Literal, Any HeadersType = Union[Headers, dict[str, str], Sequence[tuple[str, str]]] @@ -383,3 +384,32 @@ def extract_vo(headers: "HeadersType") -> "str": except RucioException as err: # VO Name doesn't match allowed spec 
         flask.abort(generate_http_error_flask(status_code=400, exc=err))
+
+
+def get_account_from_verified_identity(identity_key, id_type: 'Literal["USERPASS", "X509"]', password: 'Union[str, None]' = None) -> list:
+    """ Verifies the provided identity and tries to return a matching account.
+    If no account is found, raises an IdentityError after trying to verify the identity.
+    If multiple accounts are found, returns the default account if available, otherwise all accounts.
+    :param identity_key: The identity key name. For example x509 DN, or a username.
+    :param id_type: The type of the authentication (x509, USERPASS).
+    :param password: required only if id_type==USERPASS.
+    :raises IdentityError: if no account is found for the identity or if the identity could not be verified.
+    :returns: a list of accounts (account name strings or InternalAccount objects).
+    """
+    accounts = list_accounts_for_identity(identity_key=identity_key, id_type=id_type)
+    if accounts is None or len(accounts) == 0:
+        # Verify the identity first so the caller gets a precise error
+        # (verify_identity raises on bad credentials / unknown identity).
+        if id_type == 'USERPASS':
+            verify_identity(identity_key=identity_key, id_type=id_type, password=password)
+        elif id_type == 'X509':
+            verify_identity(identity_key=identity_key, id_type=id_type)
+        # Identity is valid but mapped to no account; never fall through to
+        # accounts[0] below, which would fail on an empty/None result.
+        raise IdentityError('No account found for identity')
+    if len(accounts) > 1:
+        try:
+            default_account = get_default_account(identity_key=identity_key, id_type=id_type)
+            return [default_account]
+        except IdentityError:
+            return accounts
+    else:
+        account = accounts[0]
+        return [account]
diff --git a/lib/rucio/web/ui/static/webui_version b/lib/rucio/web/ui/static/webui_version
index cef8101081..2552b6687e 100644
--- a/lib/rucio/web/ui/static/webui_version
+++ b/lib/rucio/web/ui/static/webui_version
@@ -1 +1 @@
-33.0.0
\ No newline at end of file
+34.0.0rc1
\ No newline at end of file