diff --git a/phabfive/__init__.py b/phabfive/__init__.py index a57c3fd..6e2f58a 100644 --- a/phabfive/__init__.py +++ b/phabfive/__init__.py @@ -19,7 +19,8 @@ def init_logging(log_level): if isinstance(_log_level, str): print( - "CRITICAL: Undefined log-level set, please use any of the defined log levels inside Python logging module" + "CRITICAL - Undefined log-level set, please use any of the defined log levels inside Python logging module", + file=sys.stderr, ) sys.exit(1) @@ -30,6 +31,7 @@ def init_logging(log_level): logging_conf = { "version": 1, + "disable_existing_loggers": False, "root": { "level": log_level, "handlers": ["console"], diff --git a/phabfive/cli.py b/phabfive/cli.py index 8fc4c6a..b6b7144 100644 --- a/phabfive/cli.py +++ b/phabfive/cli.py @@ -1,12 +1,18 @@ # -*- coding: utf-8 -*- # python std lib +import logging import re import sys # 3rd party imports from docopt import DocoptExit, Option, docopt, extras +# phabfive imports +from phabfive.constants import MONOGRAMS + +log = logging.getLogger(__name__) + base_args = """ Usage: phabfive [options] [ ...] @@ -28,6 +34,9 @@ Options: --log-level=LEVEL Set loglevel [default: INFO] + --format=FORMAT Output format: rich (default), tree, or strict [default: rich] + --ascii=WHEN Use ASCII instead of Unicode (always/auto/never) [default: auto] + --hyperlink=WHEN Enable terminal hyperlinks (always/auto/never) [default: auto] -h, --help Show this help message and exit -V, --version Display the version number and exit """ @@ -254,9 +263,6 @@ # Using YAML templates phabfive maniphest search --with templates/task-search/tasks-resolved-but-not-in-done.yaml phabfive maniphest search --with templates/task-search/search-template.yaml --tag Override-Project - - # Requires at least one filter (text, tag, date, column, priority, or status) - phabfive maniphest search # ERROR: not specific enough """ @@ -285,8 +291,6 @@ def parse_cli(): argv = [cli_args[""]] + cli_args[""] - from phabfive.constants import MONOGRAMS - patterns = re.compile("^(?:" + "|".join(MONOGRAMS.values()) + ")") # First check for monogram shortcuts, i.e. 
invocation with `phabfive K123` @@ -384,10 +388,40 @@ def run(cli_args, sub_args): # Local imports required due to logging limitation from phabfive import diffusion, maniphest, passphrase, paste, repl, user from phabfive.constants import REPO_STATUS_CHOICES + from phabfive.core import Phabfive from phabfive.exceptions import PhabfiveException from phabfive.maniphest_transitions import parse_transition_patterns from phabfive.priority_transitions import parse_priority_patterns + # Validate and process output options + valid_modes = ("always", "auto", "never") + valid_formats = ("rich", "tree", "strict") + output_format = cli_args.get("--format", "rich") + ascii_when = cli_args.get("--ascii", "never") + hyperlink_when = cli_args.get("--hyperlink", "never") + + if output_format not in valid_formats: + sys.exit(f"ERROR - --format must be one of: {', '.join(valid_formats)}") + if ascii_when not in valid_modes: + sys.exit(f"ERROR - --ascii must be one of: {', '.join(valid_modes)}") + if hyperlink_when not in valid_modes: + sys.exit(f"ERROR - --hyperlink must be one of: {', '.join(valid_modes)}") + + # Check mutual exclusivity + if ascii_when == "always" and hyperlink_when == "always": + sys.exit("ERROR - --ascii=always and --hyperlink=always are mutually exclusive") + if output_format == "strict" and hyperlink_when == "always": + sys.exit( + "ERROR - --format=strict and --hyperlink=always are mutually exclusive" + ) + + # Set output formatting options + Phabfive.set_output_options( + ascii_when=ascii_when, + hyperlink_when=hyperlink_when, + output_format=output_format, + ) + retcode = 0 try: @@ -527,16 +561,18 @@ def run(cli_args, sub_args): return retcode else: # Create a single search config from CLI parameters - search_configs = [{ - 'search': {}, - 'title': 'Command Line Search', - 'description': None - }] + search_configs = [ + { + "search": {}, + "title": "Command Line Search", + "description": None, + } + ] # Helper function to get value with CLI override priority def get_param(cli_key, yaml_params, yaml_key=None, default=None): if yaml_key is None: - yaml_key = cli_key.lstrip('-') + yaml_key = cli_key.lstrip("-") # CLI takes precedence over YAML cli_value = sub_args.get(cli_key) @@ -548,13 +584,16 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): # Execute each search configuration for i, config in enumerate(search_configs): - yaml_params = config['search'] + yaml_params = config["search"] # Print search header if multiple searches or if title/description provided - if len(search_configs) > 1 or config['title'] != 'Command Line Search': + if ( + len(search_configs) > 1 + or config["title"] != "Command Line Search" + ): print(f"\n{'=' * 60}") print(f"🔍 {config['title']}") - if config['description']: + if config["description"]: print(f"📝 {config['description']}") print(f"{'=' * 60}") @@ -563,7 +602,9 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): column_pattern = get_param("--column", yaml_params, "column") if column_pattern: try: - transition_patterns = parse_transition_patterns(column_pattern) + transition_patterns = parse_transition_patterns( + column_pattern + ) except Exception as e: print( f"ERROR: Invalid column filter pattern in {config['title']}: {e}", @@ -576,7 +617,9 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): priority_pattern = get_param("--priority", yaml_params, "priority") if priority_pattern: try: - priority_patterns = parse_priority_patterns(priority_pattern) + priority_patterns = parse_priority_patterns( + 
priority_pattern + ) except Exception as e: print( f"ERROR: Invalid priority filter pattern in {config['title']}: {e}", @@ -590,8 +633,10 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): if status_pattern: try: # Parse status patterns with API-fetched status ordering - status_patterns = maniphest_app.parse_status_patterns_with_api( - status_pattern + status_patterns = ( + maniphest_app.parse_status_patterns_with_api( + status_pattern + ) ) except Exception as e: print( @@ -602,12 +647,37 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): return retcode # Get other parameters with CLI override priority - show_history = get_param("--show-history", yaml_params, "show-history", False) - show_metadata = get_param("--show-metadata", yaml_params, "show-metadata", False) + show_history = get_param( + "--show-history", yaml_params, "show-history", False + ) + show_metadata = get_param( + "--show-metadata", yaml_params, "show-metadata", False + ) text_query = get_param("", yaml_params, "text_query") tag = get_param("--tag", yaml_params, "tag") - created_after = get_param("--created-after", yaml_params, "created-after") - updated_after = get_param("--updated-after", yaml_params, "updated-after") + created_after = get_param( + "--created-after", yaml_params, "created-after" + ) + updated_after = get_param( + "--updated-after", yaml_params, "updated-after" + ) + + # Check if any search criteria provided, show usage if not + has_criteria = any( + [ + text_query, + tag, + created_after, + updated_after, + transition_patterns, + priority_patterns, + status_patterns, + ] + ) + if not has_criteria: + print("Usage:") + print(" phabfive maniphest search [] [options]") + return retcode maniphest_app.task_search( text_query=text_query, @@ -666,7 +736,17 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): if sub_args.get("show"): # Use new unified task_show() method - task_id = int(sub_args[""][1:]) + ticket_id = sub_args[""] + + # Validate ticket ID format using MONOGRAMS pattern + maniphest_pattern = f"^{MONOGRAMS['maniphest']}$" + if not re.match(maniphest_pattern, ticket_id): + log.critical( + f"Invalid task ID '{ticket_id}'. 
Expected format: T123" + ) + return 1 + + task_id = int(ticket_id[1:]) # Handle flags show_history = sub_args.get("--show-history", False) @@ -681,7 +761,7 @@ def get_param(cli_key, yaml_params, yaml_key=None, default=None): ) except PhabfiveException as e: # Catch all types of phabricator base exceptions - print(f"CRITICAL :: {str(e)}", file=sys.stderr) + log.critical(str(e)) retcode = 1 return retcode diff --git a/phabfive/core.py b/phabfive/core.py index ebdb43f..2fe73f4 100644 --- a/phabfive/core.py +++ b/phabfive/core.py @@ -16,12 +16,18 @@ DEFAULTS, CONFIGURABLES, ) -from phabfive.exceptions import PhabfiveConfigException, PhabfiveRemoteException +from phabfive.exceptions import ( + PhabfiveConfigException, + PhabfiveDataException, + PhabfiveRemoteException, +) # 3rd party imports import anyconfig import appdirs from phabricator import Phabricator, APIError +from rich.console import Console +from rich.text import Text log = logging.getLogger(__name__) @@ -29,6 +35,180 @@ class Phabfive: + # Output formatting options (set by CLI) + _ascii_when = "auto" + _hyperlink_when = "auto" + _output_format = "rich" + # Maximum line width for rich format (to prevent YAML breaking) + MAX_LINE_WIDTH = 4096 + + @classmethod + def set_output_options( + cls, ascii_when="auto", hyperlink_when="auto", output_format="rich" + ): + """Set global output formatting options.""" + cls._ascii_when = ascii_when + cls._hyperlink_when = hyperlink_when + cls._output_format = output_format + + @staticmethod + def _should_use_ascii(): + """Determine if ASCII mode should be used based on terminal capabilities.""" + import sys + import locale + + # Check if stdout is a TTY + if not sys.stdout.isatty(): + return True + + # Check locale encoding + try: + encoding = locale.getpreferredencoding(False).lower() + if "utf" not in encoding: + return True + except Exception: + return True + + return False + + @staticmethod + def _should_use_hyperlink(): + """Determine if hyperlinks should be used based on terminal capabilities. + + There's no standard query for OSC 8 support, so we check for known + supporting terminals via environment variables. + """ + import sys + import os + + # Must be a TTY + if not sys.stdout.isatty(): + return False + + # Check for terminals known to support OSC 8 + term = os.environ.get("TERM", "") + term_program = os.environ.get("TERM_PROGRAM", "") + colorterm = os.environ.get("COLORTERM", "") + + # Known supporting terminal programs + if term_program in ( + "iTerm.app", + "WezTerm", + "vscode", + "Hyper", + "mintty", + "ghostty", + ): + return True + + # Windows Terminal + if os.environ.get("WT_SESSION"): + return True + + # VTE-based terminals (GNOME Terminal, Tilix, Terminator, etc.) 
+ # VTE >= 0.50 supports OSC 8 (version 5000+) + vte_version = os.environ.get("VTE_VERSION", "") + if vte_version.isdigit() and int(vte_version) >= 5000: + return True + + # KDE Konsole (version 22.04+ has good OSC 8 support) + if os.environ.get("KONSOLE_VERSION"): + return True + + # Terminals identifiable by TERM + if any(t in term for t in ("kitty", "alacritty", "foot", "contour")): + return True + + # COLORTERM=truecolor is a reasonable proxy for modern terminals + if colorterm in ("truecolor", "24bit"): + return True + + return False + + def _is_ascii_enabled(self): + """Check if ASCII mode is currently enabled.""" + if self._ascii_when == "always": + return True + if self._ascii_when == "auto": + return self._should_use_ascii() + return False + + def _is_hyperlink_enabled(self): + """Check if hyperlink mode is currently enabled.""" + if self._hyperlink_when == "always": + return True + if self._hyperlink_when == "auto": + return self._should_use_hyperlink() + return False + + def format_direction(self, direction): + """Format direction indicator based on output mode.""" + if self._is_ascii_enabled(): + mapping = {"•": "-", "↑": "^", "↓": "v", "→": ">", "←": "<"} + return mapping.get(direction, direction) + return direction + + def format_link(self, url, text, show_url=True): + """Format URL as Rich Text with hyperlink if enabled. + + Parameters + ---------- + url : str + The URL to link to + text : str + The visible text for the link + show_url : bool + If True and hyperlinks disabled, return url. If False, return text. + + Returns + ------- + Text or str + Rich Text object with link styling, or plain string if disabled + """ + if self._is_hyperlink_enabled(): + t = Text(text) + t.stylize(f"link {url}") + return t + return url if show_url else text + + def get_console(self): + """Get a Rich Console instance for output.""" + # Use our hyperlink detection to force terminal mode + # This ensures Rich outputs hyperlinks when our detection says the terminal supports them + force_terminal = self._is_hyperlink_enabled() + no_color = self._ascii_when == "always" + # Use large width to prevent soft-wrapping which breaks YAML output + return Console( + force_terminal=force_terminal, no_color=no_color, width=self.MAX_LINE_WIDTH + ) + + def check_line_width(self, value, field_name="field"): + """Check if a value exceeds the maximum line width for rich format. + + Parameters + ---------- + value : any + The value to check (will be converted to string) + field_name : str + Name of the field for error messages + + Raises + ------ + PhabfiveDataException + If the value exceeds MAX_LINE_WIDTH and output format is 'rich' + """ + if self._output_format != "rich": + return # Only applies to rich format + + str_value = str(value) if value is not None else "" + # Check each line in case of multi-line values + for i, line in enumerate(str_value.split("\n")): + if len(line) > self.MAX_LINE_WIDTH: + raise PhabfiveDataException( + f"{field_name} line {i + 1} exceeds maximum width of {self.MAX_LINE_WIDTH} characters " + f"(length: {len(line)}). Use --format=strict for guaranteed valid YAML output." 
+ ) + def __init__(self): """ """ self.conf = self.load_config() diff --git a/phabfive/maniphest.py b/phabfive/maniphest.py index 845bd1e..a7f5145 100644 --- a/phabfive/maniphest.py +++ b/phabfive/maniphest.py @@ -17,6 +17,8 @@ from jinja2 import Environment, Template, meta # 3rd party imports +from rich.text import Text +from rich.tree import Tree from ruamel.yaml import YAML from ruamel.yaml.scalarstring import PreservedScalarString @@ -240,17 +242,29 @@ def _fetch_all_transactions( Returns ------- dict - Dictionary with keys 'columns', 'priority', 'status', 'assignee', 'comments' + Dictionary with keys 'columns', 'priority', 'status', 'assignee', and 'comments', each containing a list of transaction dicts with keys: - oldValue: previous value (format depends on transaction type) - newValue: new value (format depends on transaction type) - dateCreated: timestamp (int) For comments: authorPHID, text, dateCreated """ - result_dict = {"columns": [], "priority": [], "status": [], "assignee": [], "comments": []} + result_dict = { + "columns": [], + "priority": [], + "status": [], + "assignee": [], + "comments": [], + } # Early return if nothing requested - if not (need_columns or need_priority or need_status or need_assignee or need_comments): + if not ( + need_columns + or need_priority + or need_status + or need_assignee + or need_comments + ): return result_dict try: @@ -1358,19 +1372,20 @@ def _build_priority_transitions(self, priority_transactions): new_priority_name = new_value if new_value else "Unknown" # Determine if raised or lowered - direction = "[•]" + direction = f"[{self.format_direction('•')}]" if old_value and new_value: old_order = get_priority_order(old_value) new_order = get_priority_order(new_value) if old_order is not None and new_order is not None: if new_order < old_order: # Raised (higher priority) - direction = "[↑]" + direction = f"[{self.format_direction('↑')}]" elif new_order > old_order: # Lowered (lower priority) - direction = "[↓]" + direction = f"[{self.format_direction('↓')}]" + arrow = self.format_direction("→") transitions.append( - f"{timestamp_str} {direction} {old_priority_name} → {new_priority_name}" + f"{timestamp_str} {direction} {old_priority_name} {arrow} {new_priority_name}" ) return transitions @@ -1404,7 +1419,9 @@ def _build_comments(self, comment_transactions, task_id): author_map = {} if author_phids: try: - result = self.phab.user.search(constraints={"phids": list(author_phids)}) + result = self.phab.user.search( + constraints={"phids": list(author_phids)} + ) author_map = { u["phid"]: u["fields"]["username"] for u in result.get("data", []) } @@ -1425,7 +1442,9 @@ def _build_comments(self, comment_transactions, task_id): text = trans.get("text", "") # Format timestamp - timestamp_str = format_timestamp(date_created) if date_created else "Unknown" + timestamp_str = ( + format_timestamp(date_created) if date_created else "Unknown" + ) # Format: compact for single-line, block scalar for multi-line # Include comment reference (T5@93) for future edit/remove commands @@ -1484,7 +1503,7 @@ def _build_status_transitions(self, status_transactions): new_status_name = new_value if new_value else "Unknown" # Determine if raised (progressed) or lowered (regressed) - direction = "[•]" + direction = f"[{self.format_direction('•')}]" if old_value and new_value: # Get status info from API api_response = self._get_api_status_map() @@ -1493,12 +1512,13 @@ def _build_status_transitions(self, status_transactions): if old_order is not None and new_order is not 
None: if new_order > old_order: # Raised (progressed forward) - direction = "[↑]" + direction = f"[{self.format_direction('↑')}]" elif new_order < old_order: # Lowered (moved backward) - direction = "[↓]" + direction = f"[{self.format_direction('↓')}]" + arrow = self.format_direction("→") transitions.append( - f"{timestamp_str} {direction} {old_status_name} → {new_status_name}" + f"{timestamp_str} {direction} {old_status_name} {arrow} {new_status_name}" ) return transitions @@ -1556,7 +1576,11 @@ def _build_assignee_transitions(self, assignee_transactions): old_name = user_map.get(old_value, "(none)") if old_value else "(none)" new_name = user_map.get(new_value, "(none)") if new_value else "(none)" - transitions.append(f"{timestamp_str} [•] {old_name} → {new_name}") + direction = f"[{self.format_direction('•')}]" + arrow = self.format_direction("→") + transitions.append( + f"{timestamp_str} {direction} {old_name} {arrow} {new_name}" + ) return transitions @@ -1611,17 +1635,18 @@ def _build_column_transitions(self, transactions, column_info): new_col_name = "Unknown" # Determine if forward or backward - direction = "[•]" + direction = f"[{self.format_direction('•')}]" if old_value and new_value and len(old_value) > 1 and len(new_value) > 1: old_seq = column_info.get(old_value[1], {}).get("sequence", 0) new_seq = column_info.get(new_value[1], {}).get("sequence", 0) if new_seq > old_seq: - direction = "[→]" + direction = f"[{self.format_direction('→')}]" elif new_seq < old_seq: - direction = "[←]" + direction = f"[{self.format_direction('←')}]" + arrow = self.format_direction("→") transitions_list.append( - f"{timestamp_str} {direction} {old_col_name} → {new_col_name}" + f"{timestamp_str} {direction} {old_col_name} {arrow} {new_col_name}" ) return transitions_list @@ -1667,8 +1692,13 @@ def _build_task_boards( # Get current column only columns = board_data.get("columns", []) if columns: - column_name = columns[0].get("name", "Unknown") - boards_dict[project_name] = {"Column": column_name} + column_data = columns[0] + column_name = column_data.get("name", "Unknown") + column_phid = column_data.get("phid", "") + boards_dict[project_name] = { + "Column": column_name, + "_column_phid": column_phid, + } return boards_dict @@ -1902,7 +1932,9 @@ def _format_and_display_tasks( owner_map = {} if owner_phids: - user_result = self.phab.user.search(constraints={"phids": list(owner_phids)}) + user_result = self.phab.user.search( + constraints={"phids": list(owner_phids)} + ) owner_map = { u["phid"]: u["fields"]["username"] for u in user_result.get("data", []) } @@ -1913,8 +1945,14 @@ def _format_and_display_tasks( for item in result_data: fields = item.get("fields", {}) - # Build task dict - task_dict = {"Link": f"{self.url}/T{item['id']}", "Task": {}} + # Build task dict - store URL and formatted link separately + url = f"{self.url}/T{item['id']}" + link_text = f"T{item['id']}" + task_dict = { + "_url": url, + "_link": self.format_link(url, link_text), + "Task": {}, + } # Build task fields task_data = {} @@ -1928,12 +1966,16 @@ def _format_and_display_tasks( # Priority task_data["Priority"] = fields.get("priority", {}).get("name", "Unknown") - # Assignee + # Assignee - store separately for direct printing (hyperlink support) owner_phid = fields.get("ownerPHID") if owner_phid: - task_data["Assignee"] = owner_map.get(owner_phid, owner_phid) + username = owner_map.get(owner_phid, owner_phid) + user_url = f"{self.url}/p/{username}/" + task_dict["_assignee"] = self.format_link( + user_url, username, 
show_url=False + ) else: - task_data["Assignee"] = "(none)" + task_dict["_assignee"] = "(none)" # Dates if fields.get("dateCreated"): @@ -1999,14 +2041,301 @@ def _format_and_display_tasks( tasks_list.append(task_dict) - # Output as YAML using ruamel.yaml for proper multi-line formatting - print() # Empty line for separation + # Display tasks using the appropriate format + console = self.get_console() + + try: + for task_dict in tasks_list: + if self._output_format == "tree": + self._display_task_tree(console, task_dict) + elif self._output_format == "strict": + self._display_task_strict(task_dict) + else: # "rich" (default) + self._display_task_yaml(console, task_dict) + except BrokenPipeError: + # Handle pipe closed by consumer (e.g., head, less) + # Quietly exit - this is normal behavior + sys.stderr.close() + sys.exit(0) + + return len(tasks_list) + + def _needs_yaml_quoting(self, value): + """Check if a string value needs YAML quoting. + + Values need quoting if they contain YAML special characters + that could be misinterpreted. + """ + if not isinstance(value, str): + return False + # YAML special chars: colon, braces, brackets, backticks, quotes, empty string + return value == "" or any(c in value for c in ":{}[]`'\"") + + def _display_task_yaml(self, console, task_dict): + """Display a single task in YAML-like format using Rich. + + Parameters + ---------- + console : Console + Rich Console instance for output + task_dict : dict + Task data dictionary with _link, _url, _assignee, Task, Boards, etc. + """ + # Extract internal fields + link = task_dict.get("_link") + assignee = task_dict.get("_assignee") + task_data = task_dict.get("Task", {}) + boards = task_dict.get("Boards", {}) + history = task_dict.get("History", {}) + metadata = task_dict.get("Metadata", {}) + + # Print link + console.print(Text.assemble("- Link: ", link)) + + # Print Task section + console.print(" Task:") + for key, value in task_data.items(): + # Check line width before printing + self.check_line_width(value, f"Task.{key}") + + if isinstance(value, (str, PreservedScalarString)) and "\n" in str(value): + # Multi-line value + console.print(f" {key}: |-") + for line in str(value).splitlines(): + console.print(f" {line}") + elif self._needs_yaml_quoting(value): + escaped = value.replace("'", "''") + console.print(f" {key}: '{escaped}'") + else: + console.print(f" {key}: {value}") + + # Print Assignee + if assignee: + console.print(Text.assemble(" Assignee: ", assignee)) + + # Print Boards with clickable names + if boards: + console.print(" Boards:") + for board_name, board_data in boards.items(): + project_slug = board_name.lower().replace(" ", "-") + board_url = f"{self.url}/tag/{project_slug}/" + board_link = self.format_link(board_url, board_name, show_url=False) + console.print(Text.assemble(" ", board_link, ":")) + + if isinstance(board_data, dict): + for key, value in board_data.items(): + if key.startswith("_"): + continue + if key == "Column": + column_phid = board_data.get("_column_phid", "") + needs_quoting = self._needs_yaml_quoting(value) + if column_phid: + query_url = ( + f"{self.url}/maniphest/?columns={column_phid}" + ) + column_link = self.format_link( + query_url, value, show_url=False + ) + if needs_quoting: + # When hyperlinks enabled, column_link is Text; when disabled, it's str + if isinstance(column_link, Text): + console.print( + Text.assemble( + " Column: '", column_link, "'" + ) + ) + else: + escaped = column_link.replace("'", "''") + console.print(f" Column: '{escaped}'") + else: + 
console.print( + Text.assemble(" Column: ", column_link) + ) + continue + if self._needs_yaml_quoting(value): + escaped = value.replace("'", "''") + console.print(f" {key}: '{escaped}'") + else: + console.print(f" {key}: {value}") + + # Print History section + if history: + console.print(" History:") + for hist_key, hist_value in history.items(): + if hist_key == "Boards" and isinstance(hist_value, dict): + console.print(" Boards:") + for board_name, transitions in hist_value.items(): + console.print(f" {board_name}:") + for trans in transitions: + console.print(f" - {trans}") + elif isinstance(hist_value, list): + console.print(f" {hist_key}:") + for trans in hist_value: + console.print(f" - {trans}") + + # Print Metadata section + if metadata: + console.print(" Metadata:") + for meta_key, meta_value in metadata.items(): + if isinstance(meta_value, list): + if meta_value: + console.print(f" {meta_key}:") + for item in meta_value: + console.print(f" - {item}") + else: + console.print(f" {meta_key}: []") + else: + console.print(f" {meta_key}: {meta_value}") + + def _display_task_tree(self, console, task_dict): + """Display a single task in tree format using Rich Tree. + + Parameters + ---------- + console : Console + Rich Console instance for output + task_dict : dict + Task data dictionary with _link, _url, _assignee, Task, Boards, etc. + """ + # Extract internal fields + link = task_dict.get("_link") + assignee = task_dict.get("_assignee") + task_data = task_dict.get("Task", {}) + boards = task_dict.get("Boards", {}) + history = task_dict.get("History", {}) + metadata = task_dict.get("Metadata", {}) + + # Create tree with task link as root + tree = Tree(link) + + # Add Task section + task_branch = tree.add("Task") + for key, value in task_data.items(): + if isinstance(value, (str, PreservedScalarString)) and "\n" in str(value): + # Truncate multi-line descriptions in tree view + first_line = str(value).split("\n")[0] + if len(first_line) > 60: + first_line = first_line[:57] + "..." 
+ task_branch.add(f"{key}: {first_line}") + else: + task_branch.add(f"{key}: {value}") + + # Add Assignee + if assignee: + task_branch.add(Text.assemble("Assignee: ", assignee)) + + # Add Boards section + if boards: + boards_branch = tree.add("Boards") + for board_name, board_data in boards.items(): + project_slug = board_name.lower().replace(" ", "-") + board_url = f"{self.url}/tag/{project_slug}/" + board_link = self.format_link(board_url, board_name, show_url=False) + board_branch = boards_branch.add(board_link) + + if isinstance(board_data, dict): + for key, value in board_data.items(): + if key.startswith("_"): + continue + if key == "Column": + column_phid = board_data.get("_column_phid", "") + if column_phid: + query_url = ( + f"{self.url}/maniphest/?columns={column_phid}" + ) + column_link = self.format_link( + query_url, value, show_url=False + ) + board_branch.add(Text.assemble("Column: ", column_link)) + continue + board_branch.add(f"{key}: {value}") + + # Add History section + if history: + history_branch = tree.add("History") + for hist_key, hist_value in history.items(): + if hist_key == "Boards" and isinstance(hist_value, dict): + boards_hist = history_branch.add("Boards") + for board_name, transitions in hist_value.items(): + board_hist = boards_hist.add(board_name) + for trans in transitions: + board_hist.add(trans) + elif isinstance(hist_value, list): + hist_type_branch = history_branch.add(hist_key) + for trans in hist_value: + hist_type_branch.add(trans) + + # Add Metadata section + if metadata: + meta_branch = tree.add("Metadata") + for meta_key, meta_value in metadata.items(): + if isinstance(meta_value, list): + if meta_value: + list_branch = meta_branch.add(meta_key) + for item in meta_value: + list_branch.add(str(item)) + else: + meta_branch.add(f"{meta_key}: []") + else: + meta_branch.add(f"{meta_key}: {meta_value}") + + console.print(tree) + + def _display_task_strict(self, task_dict): + """Display task as strict YAML via ruamel.yaml. + + Guaranteed conformant YAML output for piping to yq/jq. + No hyperlinks, no Rich formatting. + + Parameters + ---------- + task_dict : dict + Task data dictionary with Link, Task, Boards, History, Metadata, etc. 
+ """ + from io import StringIO yaml = YAML() yaml.default_flow_style = False - yaml.preserve_quotes = True - yaml.width = 4096 # Avoid unwanted line wrapping - yaml.dump(tasks_list, sys.stdout) + + # Build clean dict - use _url for the Link (plain URL string) + output = {"Link": task_dict.get("_url", "")} + + # Add Task section + if task_dict.get("Task"): + output["Task"] = {k: v for k, v in task_dict["Task"].items()} + + # Add Assignee if present (extract plain text from Rich Text if needed) + assignee = task_dict.get("_assignee") + if assignee is not None: + # Convert Rich Text to plain string, or use string directly + if isinstance(assignee, Text): + output["Assignee"] = assignee.plain + else: + output["Assignee"] = str(assignee) + + # Add Boards section without internal keys + if task_dict.get("Boards"): + boards = {} + for board_name, board_data in task_dict["Boards"].items(): + if isinstance(board_data, dict): + boards[board_name] = { + k: v for k, v in board_data.items() if not k.startswith("_") + } + else: + boards[board_name] = board_data + output["Boards"] = boards + + # Add History section if present + if task_dict.get("History"): + output["History"] = task_dict["History"] + + # Add Metadata section if present + if task_dict.get("Metadata"): + output["Metadata"] = task_dict["Metadata"] + + stream = StringIO() + yaml.dump([output], stream) + print(stream.getvalue(), end="") def task_show( self, task_id, show_history=False, show_metadata=False, show_comments=False @@ -2049,7 +2378,7 @@ def task_show( if show_history or show_comments: task_phid = result_data[0].get("phid") if task_phid: - log.info(f"Fetching transaction data for T{task_id}") + log.debug(f"Fetching transactions for T{task_id}") # Fetch all relevant transaction types in a single API call all_fetched_transactions = self._fetch_all_transactions( task_phid, @@ -2076,7 +2405,7 @@ def task_show( comments_map[task_id] = all_fetched_transactions["comments"] # Use shared method to format and display the task - self._format_and_display_tasks( + return self._format_and_display_tasks( result_data, task_transitions_map=task_transitions_map, priority_transitions_map=priority_transitions_map, @@ -2117,20 +2446,29 @@ def _load_search_from_yaml(self, template_path): raise PhabfiveException(f"Path is not a file: {template_path}") try: - with open(template_file, 'r', encoding='utf-8') as f: + with open(template_file, "r", encoding="utf-8") as f: yaml_loader = YAML() # Load all documents from the YAML file documents = list(yaml_loader.load_all(f)) except Exception as e: - raise PhabfiveException(f"Failed to parse template file {template_path}: {e}") + raise PhabfiveException( + f"Failed to parse template file {template_path}: {e}" + ) if not documents: raise PhabfiveException("Template file contains no documents") search_configs = [] supported_params = { - 'text_query', 'tag', 'created-after', 'updated-after', - 'column', 'priority', 'status', 'show-history', 'show-metadata' + "text_query", + "tag", + "created-after", + "updated-after", + "column", + "priority", + "status", + "show-history", + "show-metadata", } for i, data in enumerate(documents): @@ -2139,7 +2477,7 @@ def _load_search_from_yaml(self, template_path): f"Document {i + 1} in YAML file must contain a dictionary at root level" ) - search_params = data.get('search', {}) + search_params = data.get("search", {}) if not isinstance(search_params, dict): raise PhabfiveException( f"Document {i + 1}: 'search' section must be a dictionary" @@ -2155,13 +2493,15 @@ def 
_load_search_from_yaml(self, template_path): # Store the search config with optional title and description config = { - 'search': search_params, - 'title': data.get('title', f"Search {i + 1}"), - 'description': data.get('description', None) + "search": search_params, + "title": data.get("title", f"Search {i + 1}"), + "description": data.get("description", None), } search_configs.append(config) - log.info(f"Loaded {len(search_configs)} search configuration(s) from {template_path}") + log.info( + f"Loaded {len(search_configs)} search configuration(s) from {template_path}" + ) for i, config in enumerate(search_configs): log.debug(f"Search {i + 1} parameters: {config['search']}") @@ -2217,17 +2557,11 @@ def task_search( ) if not has_any_filter: - raise PhabfiveConfigException( - "No search criteria specified. Please provide at least one of:\n" - " - Free-text query: phabfive maniphest search 'search text'\n" - " - Project tag: --tag='Project Name'\n" - " - Date filter: --created-after=N or --updated-after=N\n" - " - Column filter: --column='pattern'\n" - " - Priority filter: --priority='pattern'\n" - " - Status filter: --status='pattern'" - ) + raise PhabfiveConfigException("No search criteria specified") - # Convert date filters to Unix timestamps + # Convert date filters to Unix timestamps (preserve original day values for logging) + created_after_days = created_after + updated_after_days = updated_after if created_after: created_after = days_to_unix(created_after) if updated_after: @@ -2283,8 +2617,13 @@ def task_search( log.error(f"No projects matched the tag pattern '{tag}'") return + # Determine AND vs OR logic for logging + has_and_patterns = any( + len(p.project_names) > 1 for p in project_patterns + ) + logic_type = "AND" if has_and_patterns else "OR" log.info( - f"Tag pattern '{tag}' resolved to {len(project_phids)} project(s)" + f"Tag pattern '{tag}' resolved to {len(project_phids)} project(s) with {logic_type} logic" ) except PhabfiveException as e: log.error(f"Invalid tag pattern: {e}") @@ -2343,9 +2682,8 @@ def task_search( # No more pages break else: - # Handle multiple projects with OR logic (make separate calls and merge) + # Handle multiple projects (make separate calls and merge) if len(project_phids) > 1: - log.info(f"Searching {len(project_phids)} projects with OR logic") all_tasks = {} # task_id -> task_data for phid in project_phids: constraints = {"projects": [phid]} @@ -2460,17 +2798,25 @@ def task_search( or project_patterns ): filter_desc = [] + if text_query: + filter_desc.append(f"query='{text_query}'") + if tag: + filter_desc.append(f"tag='{tag}'") + if created_after: + filter_desc.append(f"created-after={created_after_days}d") + if updated_after: + filter_desc.append(f"updated-after={updated_after_days}d") if transition_patterns: - filter_desc.append("column transition patterns") + col_strs = [str(p) for p in transition_patterns] + filter_desc.append(f"column='{','.join(col_strs)}'") if priority_patterns: - filter_desc.append("priority patterns") + pri_strs = [str(p) for p in priority_patterns] + filter_desc.append(f"priority='{','.join(pri_strs)}'") if status_patterns: - filter_desc.append("status patterns") - if project_patterns: - filter_desc.append("project patterns") - log.info( - f"Filtering {len(result_data)} tasks by {' and '.join(filter_desc)}" - ) + stat_strs = [str(p) for p in status_patterns] + filter_desc.append(f"status='{','.join(stat_strs)}'") + # Note: project_patterns is derived from tag, so not shown separately + log.info(f"Filtering 
{len(result_data)} tasks by {', '.join(filter_desc)}") # Add performance warning for large datasets if len(result_data) > 50: @@ -2600,7 +2946,9 @@ def task_search( if status_patterns: matching_status_map[item["id"]] = status_matches - log.info(f"Filtered down to {len(filtered_tasks)} tasks matching patterns") + log.info( + f"Found {len(filtered_tasks)} matches out of {len(result_data)} tasks in {len(project_phids)} project(s)" + ) result_data = filtered_tasks elif show_history: # Fetch transitions for all tasks when --show-history is used without filtering @@ -2630,7 +2978,7 @@ def task_search( ] # Use shared method to format and display tasks - self._format_and_display_tasks( + return self._format_and_display_tasks( result_data, task_transitions_map=task_transitions_map, priority_transitions_map=priority_transitions_map, @@ -2966,7 +3314,11 @@ def recurse_commit_transactions(task_config, parent_task_config, depth=0): if dry_run: # Extract title from transactions for display title = next( - (t["value"] for t in transactions_to_commit if t["type"] == "title"), + ( + t["value"] + for t in transactions_to_commit + if t["type"] == "title" + ), "", ) indent = " " * depth diff --git a/phabfive/maniphest_transitions.py b/phabfive/maniphest_transitions.py index 0c94188..32430b0 100644 --- a/phabfive/maniphest_transitions.py +++ b/phabfive/maniphest_transitions.py @@ -27,6 +27,25 @@ def __init__(self, conditions): """ self.conditions = conditions + def __str__(self): + """Return string representation of the pattern.""" + parts = [] + for cond in self.conditions: + cond_type = cond.get("type", "") + negated = cond.get("negated", False) + prefix = "not:" if negated else "" + + if cond_type in ("backward", "forward"): + parts.append(f"{prefix}{cond_type}") + else: + column = cond.get("column", "") + direction = cond.get("direction") + if direction: + parts.append(f"{prefix}{cond_type}:{column}:{direction}") + else: + parts.append(f"{prefix}{cond_type}:{column}") + return "+".join(parts) + def matches(self, task_transactions, current_column, column_info): """ Check if all conditions in this pattern match (AND logic). diff --git a/phabfive/passphrase.py b/phabfive/passphrase.py index 4ba32d9..52395d2 100644 --- a/phabfive/passphrase.py +++ b/phabfive/passphrase.py @@ -26,7 +26,9 @@ def _validate_identifier(self, id_): def get_secret(self, ids): if not self._validate_identifier(ids): - raise PhabfiveDataException(f"Identifier '{ids}' is not valid") + raise PhabfiveDataException( + f"Invalid passphrase ID '{ids}'. Expected format: K123" + ) ids = ids.replace("K", "") diff --git a/phabfive/paste.py b/phabfive/paste.py index e86b833..6a7b6b3 100644 --- a/phabfive/paste.py +++ b/phabfive/paste.py @@ -27,7 +27,9 @@ def _convert_ids(self, ids): for id_ in ids: if not self._validate_identifier(id_): - raise PhabfiveDataException(f"Identifier '{id_}' is not valid") + raise PhabfiveDataException( + f"Invalid paste ID '{id_}'. 
Expected format: P123" + ) id_ = id_.replace("P", "") # constraints takes int diff --git a/phabfive/priority_transitions.py b/phabfive/priority_transitions.py index 4e94def..4fc7963 100644 --- a/phabfive/priority_transitions.py +++ b/phabfive/priority_transitions.py @@ -59,6 +59,25 @@ def __init__(self, conditions): """ self.conditions = conditions + def __str__(self): + """Return string representation of the pattern.""" + parts = [] + for cond in self.conditions: + cond_type = cond.get("type", "") + negated = cond.get("negated", False) + prefix = "not:" if negated else "" + + if cond_type in ("raised", "lowered"): + parts.append(f"{prefix}{cond_type}") + else: + priority = cond.get("priority", "") + direction = cond.get("direction") + if direction: + parts.append(f"{prefix}{cond_type}:{priority}:{direction}") + else: + parts.append(f"{prefix}{cond_type}:{priority}") + return "+".join(parts) + def matches(self, priority_transactions, current_priority): """ Check if all conditions in this pattern match (AND logic). diff --git a/phabfive/project_filters.py b/phabfive/project_filters.py index d8b9270..a737c60 100644 --- a/phabfive/project_filters.py +++ b/phabfive/project_filters.py @@ -31,6 +31,10 @@ def __init__(self, project_names): """ self.project_names = project_names + def __str__(self): + """Return string representation of the pattern.""" + return "+".join(self.project_names) + def matches(self, task_project_names, resolved_projects_map): """ Check if all conditions in this pattern match (AND logic). diff --git a/phabfive/status_transitions.py b/phabfive/status_transitions.py index 7f387da..5bd00d7 100644 --- a/phabfive/status_transitions.py +++ b/phabfive/status_transitions.py @@ -119,6 +119,25 @@ def __init__(self, conditions, api_response=None): self.conditions = conditions self.api_response = api_response + def __str__(self): + """Return string representation of the pattern.""" + parts = [] + for cond in self.conditions: + cond_type = cond.get("type", "") + negated = cond.get("negated", False) + prefix = "not:" if negated else "" + + if cond_type in ("raised", "lowered"): + parts.append(f"{prefix}{cond_type}") + else: + status = cond.get("status", "") + direction = cond.get("direction") + if direction: + parts.append(f"{prefix}{cond_type}:{status}:{direction}") + else: + parts.append(f"{prefix}{cond_type}:{status}") + return "+".join(parts) + def matches(self, status_transactions, current_status): """ Check if all conditions in this pattern match (AND logic). 
diff --git a/pyproject.toml b/pyproject.toml index 7c406a8..c464cd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ "mkdocs>=1.6.1", "phabricator", "pyyaml", + "rich>=13.0.0", "ruamel-yaml>=0.18.16", ] diff --git a/tests/test_maniphest.py b/tests/test_maniphest.py index d7acad8..8ca0c13 100644 --- a/tests/test_maniphest.py +++ b/tests/test_maniphest.py @@ -1061,6 +1061,479 @@ def test_task_search_yaml_output_is_parsable(self, mock_init, capsys): assert task2["Task"]["Status"] == "Resolved" +class TestYAMLQuoting: + """Test that special characters are properly quoted in YAML output.""" + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_colon(self, mock_init): + """Test that colons trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("foo:bar") is True + assert maniphest._needs_yaml_quoting("http://example.com") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_braces(self, mock_init): + """Test that curly braces trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("{foo}") is True + assert maniphest._needs_yaml_quoting("${variable}") is True + assert maniphest._needs_yaml_quoting("foo}bar") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_brackets(self, mock_init): + """Test that square brackets trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("[BUG]") is True + assert maniphest._needs_yaml_quoting("[FEATURE] Add something") is True + assert maniphest._needs_yaml_quoting("foo]bar") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_backticks(self, mock_init): + """Test that backticks trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("`code`") is True + assert maniphest._needs_yaml_quoting("Run `make build`") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_single_quotes(self, mock_init): + """Test that single quotes trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("'LOREM'") is True + assert maniphest._needs_yaml_quoting("It's working") is True + assert maniphest._needs_yaml_quoting("Don't do that") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_double_quotes(self, mock_init): + """Test that double quotes trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting('"LOREM"') is True + assert maniphest._needs_yaml_quoting('Say "hello"') is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_empty_string(self, mock_init): + """Test that empty strings trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("") is True + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_safe_strings(self, mock_init): + """Test that safe strings don't trigger quoting.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting("Normal task name") is False + assert maniphest._needs_yaml_quoting("Task with numbers 123") is False + assert maniphest._needs_yaml_quoting("Task-with-dashes") is False + assert 
maniphest._needs_yaml_quoting("Task_with_underscores") is False + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_needs_yaml_quoting_non_string(self, mock_init): + """Test that non-strings return False.""" + mock_init.return_value = None + maniphest = Maniphest() + assert maniphest._needs_yaml_quoting(123) is False + assert maniphest._needs_yaml_quoting(None) is False + assert maniphest._needs_yaml_quoting(["list"]) is False + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_yaml_output_with_brackets(self, mock_init, capsys): + """Test that task names with square brackets produce valid YAML.""" + from io import StringIO + + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "[BUG] Something is broken", + "status": {"name": "Open"}, + "priority": {"name": "High"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + assert parsed_data[0]["Task"]["Name"] == "[BUG] Something is broken" + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_yaml_output_with_backticks(self, mock_init, capsys): + """Test that task names with backticks produce valid YAML.""" + from io import StringIO + + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "Fix `make build` command", + "status": {"name": "Open"}, + "priority": {"name": "Normal"}, + "description": {"raw": "Run `make test` first"}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + assert parsed_data[0]["Task"]["Name"] == "Fix `make build` command" + assert parsed_data[0]["Task"]["Description"] == "Run `make test` first" + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_yaml_output_with_mixed_special_chars(self, mock_init, capsys): + 
"""Test task with multiple special character types.""" + from io import StringIO + + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "[BUG]: Fix {template} rendering in `parser.py`", + "status": {"name": "Open"}, + "priority": {"name": "High"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + assert ( + parsed_data[0]["Task"]["Name"] + == "[BUG]: Fix {template} rendering in `parser.py`" + ) + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_yaml_output_with_single_quotes(self, mock_init, capsys): + """Test that task names with single quotes produce valid YAML with preserved quotes.""" + from io import StringIO + + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "'LOREM'", + "status": {"name": "Open"}, + "priority": {"name": "Normal"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + # Single quotes should be preserved in the parsed value + assert parsed_data[0]["Task"]["Name"] == "'LOREM'" + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_yaml_output_with_double_quotes(self, mock_init, capsys): + """Test that task names with double quotes produce valid YAML with preserved quotes.""" + from io import StringIO + + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ 
+ { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": '"LOREM"', + "status": {"name": "Open"}, + "priority": {"name": "Normal"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + # Double quotes should be preserved in the parsed value + assert parsed_data[0]["Task"]["Name"] == '"LOREM"' + + +class TestStrictFormat: + """Test that strict format produces guaranteed valid YAML via ruamel.yaml.""" + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_strict_format_output_is_valid_yaml(self, mock_init, capsys): + """Test that strict format produces valid YAML.""" + from io import StringIO + + from phabfive.core import Phabfive + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + # Set strict output format + Phabfive.set_output_options(output_format="strict") + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "Simple task", + "status": {"name": "Open"}, + "priority": {"name": "Normal"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, + "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + assert parsed_data[0]["Task"]["Name"] == "Simple task" + + # Reset to default + Phabfive.set_output_options(output_format="rich") + + @patch("phabfive.maniphest.Phabfive.__init__") + def test_strict_format_with_special_chars(self, mock_init, capsys): + """Test that strict format handles special characters correctly.""" + from io import StringIO + + from phabfive.core import Phabfive + from ruamel.yaml import YAML + + mock_init.return_value = None + maniphest = Maniphest() + maniphest.url = "https://phabricator.example.com" + + # Set strict output format + Phabfive.set_output_options(output_format="strict") + + maniphest.phab = MagicMock() + mock_project_result = MagicMock() + mock_project_result.get.return_value = { + "PHID-PROJ-123": {"name": "Test Project", "slugs": []} + } + maniphest.phab.project.query.return_value = mock_project_result + + mock_response = MagicMock() + mock_response.response = { + "data": [ + { + "id": 1, + "phid": "PHID-TASK-1", + "fields": { + "name": "[BUG]: Fix {template} `code` 'quotes' \"double\"", + "status": {"name": "Open"}, + "priority": {"name": "Normal"}, + "description": {"raw": ""}, + "dateCreated": 1234567890, 
+ "dateModified": 1234567900, + "dateClosed": None, + }, + "attachments": {"columns": {"boards": {}}}, + }, + ] + } + mock_response.get.return_value = {"after": None} + maniphest.phab.maniphest.search.return_value = mock_response + + maniphest.task_search(tag="Test Project") + + captured = capsys.readouterr() + yaml_output = captured.out + + yaml_parser = YAML() + parsed_data = yaml_parser.load(StringIO(yaml_output)) + + assert isinstance(parsed_data, list) + assert len(parsed_data) == 1 + # All special characters should be preserved + assert ( + parsed_data[0]["Task"]["Name"] + == "[BUG]: Fix {template} `code` 'quotes' \"double\"" + ) + + # Reset to default + Phabfive.set_output_options(output_format="rich") + + class TestTaskSearchTextQuery: """Test suite for free-text search functionality.""" diff --git a/uv.lock b/uv.lock index b40ab01..3304495 100644 --- a/uv.lock +++ b/uv.lock @@ -362,6 +362,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -456,6 +468,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + [[package]] name = "mergedeep" version = "1.3.4" @@ -533,6 +554,7 @@ dependencies = [ { name = "mkdocs" }, { name = "phabricator" }, { name = "pyyaml" }, + { name = "rich" }, { name = "ruamel-yaml" }, ] @@ -561,6 +583,7 @@ requires-dist = [ { name = "mkdocs", marker = "extra == 'docs'" }, { name = "phabricator" }, { name = "pyyaml" }, + { name = "rich", specifier = ">=13.0.0" }, { name = "ruamel-yaml", specifier = ">=0.18.16" }, ] provides-extras = ["docs"] @@ -780,6 +803,19 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + [[package]] name = "ruamel-yaml" version = "0.19.1"