From 8f0209d78845df679b64233b07dfb60bb976642a Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Thu, 18 Dec 2025 16:15:29 -0600 Subject: [PATCH 01/34] Add copy-paste mode with cp: prefix for web UI interaction, based on the copy-paste-no-api branch of https://github.com/ther0bster/aider.git --- aider/coders/base_coder.py | 6 +- aider/commands.py | 2 + aider/main.py | 20 ++++-- aider/models.py | 139 +++++++++++++++++++++++++++++++++++-- 4 files changed, 156 insertions(+), 11 deletions(-) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index b97d0300f8b..086a9adc920 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -172,7 +172,7 @@ async def create( if from_coder: main_model = from_coder.main_model else: - main_model = models.Model(models.DEFAULT_MODEL_NAME) + main_model = models.Model(models.DEFAULT_MODEL_NAME, io=io) if edit_format == "code": edit_format = None @@ -393,7 +393,7 @@ def __init__( self.main_model.reasoning_tag if self.main_model.reasoning_tag else REASONING_TAG ) - self.stream = stream and main_model.streaming + self.stream = stream and main_model.streaming and not main_model.copy_paste_instead_of_api if cache_prompts and self.main_model.cache_control: self.add_cache_headers = True @@ -576,6 +576,8 @@ def get_announcements(self): output += ", prompt cache" if main_model.info.get("supports_assistant_prefill"): output += ", infinite output" + if main_model.copy_paste_instead_of_api: + output += ", copy/paste mode" lines.append(output) diff --git a/aider/commands.py b/aider/commands.py index 843c0691574..2d7f104918c 100644 --- a/aider/commands.py +++ b/aider/commands.py @@ -100,6 +100,7 @@ async def cmd_model(self, args): model_name, editor_model=self.coder.main_model.editor_model.name, weak_model=self.coder.main_model.weak_model.name, + io=self.io, ) await models.sanity_check_models(self.io, model) @@ -172,6 +173,7 @@ async def cmd_weak_model(self, args): self.coder.main_model.name, 
editor_model=self.coder.main_model.editor_model.name, weak_model=model_name, + io=self.io, ) await models.sanity_check_models(self.io, model) raise SwitchCoder(main_model=model) diff --git a/aider/main.py b/aider/main.py index 723cdc8c668..071f2891e4b 100644 --- a/aider/main.py +++ b/aider/main.py @@ -968,17 +968,24 @@ def parse_model_with_suffix(model_name, overrides): if not model_name: return model_name, {} - # Split on last colon to get model name and suffix - if ":" in model_name: - base_model, suffix = model_name.rsplit(":", 1) + prefix = "" + base_model = model_name + if model_name.startswith(models.COPY_PASTE_PREFIX): + prefix = models.COPY_PASTE_PREFIX + base_model = model_name[len(prefix) :] + + if ":" in base_model: + base_model, suffix = base_model.rsplit(":", 1) else: - base_model, suffix = model_name, None + suffix = None - # Apply overrides if suffix exists override_kwargs = {} if suffix and base_model in overrides and suffix in overrides[base_model]: override_kwargs = overrides[base_model][suffix].copy() + if prefix: + base_model = prefix + base_model + return base_model, override_kwargs # Parse main model @@ -997,6 +1004,7 @@ def parse_model_with_suffix(model_name, overrides): weak_model_name, weak_model=False, verbose=args.verbose, + io=io, override_kwargs=weak_model_overrides, ) @@ -1007,6 +1015,7 @@ def parse_model_with_suffix(model_name, overrides): editor_model_name, editor_model=False, verbose=args.verbose, + io=io, override_kwargs=editor_model_overrides, ) @@ -1047,6 +1056,7 @@ def parse_model_with_suffix(model_name, overrides): editor_model=editor_model_obj, editor_edit_format=args.editor_edit_format, verbose=args.verbose, + io=io, override_kwargs=main_model_overrides, ) diff --git a/aider/models.py b/aider/models.py index a3bc3024539..e7ef57f8235 100644 --- a/aider/models.py +++ b/aider/models.py @@ -24,6 +24,7 @@ from aider.utils import check_pip_install_extra RETRY_TIMEOUT = 60 +COPY_PASTE_PREFIX = "cp:" request_timeout = 600 @@ -316,15 
+317,29 @@ def __init__( weak_model=None, editor_model=None, editor_edit_format=None, - verbose=False, + verbose=False, io=None, override_kwargs=None, ): - # Map any alias to its canonical name + # Determine copy/paste mode and map model aliases + provided_model = model or "" + if isinstance(provided_model, Model): + provided_model = provided_model.name + elif not isinstance(provided_model, str): + provided_model = str(provided_model) + + self.io = io + self.verbose = verbose + self.override_kwargs = override_kwargs or {} + + self.copy_paste_instead_of_api = provided_model.startswith(COPY_PASTE_PREFIX) + if self.copy_paste_instead_of_api: + model = provided_model.removeprefix(COPY_PASTE_PREFIX) + else: + model = provided_model + model = MODEL_ALIASES.get(model, model) self.name = model - self.verbose = verbose - self.override_kwargs = override_kwargs or {} self.max_chat_history_tokens = 1024 self.weak_model = None @@ -355,6 +370,9 @@ def __init__( else: self.get_editor_model(editor_model, editor_edit_format) + if self.copy_paste_instead_of_api: + self.streaming = False + def get_model_info(self, model): return model_info_manager.get_model_info(model) @@ -597,6 +615,11 @@ def get_weak_model(self, provided_weak_model): self.weak_model_name = None return + if self.copy_paste_instead_of_api: + self.weak_model = self + self.weak_model_name = None + return + # If provided_weak_model is already a Model object, use it directly if isinstance(provided_weak_model, Model): self.weak_model = provided_weak_model @@ -618,6 +641,7 @@ def get_weak_model(self, provided_weak_model): self.weak_model = Model( self.weak_model_name, weak_model=False, + io=self.io, ) return self.weak_model @@ -625,6 +649,11 @@ def commit_message_models(self): return [self.weak_model, self] def get_editor_model(self, provided_editor_model, editor_edit_format): + if self.copy_paste_instead_of_api: + provided_editor_model = False + self.editor_model_name = self.name + self.editor_model = self + # If 
provided_editor_model is already a Model object, use it directly if isinstance(provided_editor_model, Model): self.editor_model = provided_editor_model @@ -643,6 +672,7 @@ def get_editor_model(self, provided_editor_model, editor_edit_format): self.editor_model = Model( self.editor_model_name, editor_model=False, + io=self.io, ) if not self.editor_edit_format: @@ -955,6 +985,9 @@ async def send_completion( messages = model_request_parser(self, messages) + if self.copy_paste_instead_of_api: + return self.copy_paste_completion(messages) + if self.verbose: for message in messages: msg_role = message.get("role") @@ -1061,6 +1094,101 @@ async def send_completion( return hash_object, res + def copy_paste_completion(self, messages): + try: + import pyperclip + import uuid + except ImportError: + if self.io: + self.io.tool_error('copy/paste mode requires the pyperclip package.') + self.io.tool_output('Install it with: pip install pyperclip') + raise + + def content_to_text(content): + if not content: + return '' + if isinstance(content, str): + return content + if isinstance(content, list): + parts = [] + for part in content: + if isinstance(part, dict): + text = part.get('text') + if isinstance(text, str): + parts.append(text) + elif isinstance(part, str): + parts.append(part) + return ''.join(parts) + if isinstance(content, dict): + text = content.get('text') + if isinstance(text, str): + return text + return '' + return str(content) + + lines = [] + for message in messages: + text_content = content_to_text(message.get('content')) + if not text_content: + continue + role = message.get('role') + if role: + lines.append(f"{role.upper()}:\n{text_content}") + else: + lines.append(text_content) + + prompt_text = "\n\n".join(lines).strip() + + try: + pyperclip.copy(prompt_text) + except Exception as err: + if self.io: + self.io.tool_error(f'Unable to copy prompt to clipboard: {err}') + raise + + if self.io: + self.io.tool_output('Request copied to clipboard.') + 
self.io.tool_output('Paste it into your LLM interface, then copy the reply back.') + self.io.tool_output('Waiting for clipboard updates (Ctrl+C to cancel)...') + + try: + last_value = pyperclip.paste() + except Exception as err: + if self.io: + self.io.tool_error(f'Unable to read clipboard: {err}') + raise + + while True: + time.sleep(0.5) + try: + current_value = pyperclip.paste() + except Exception as err: + if self.io: + self.io.tool_error(f'Unable to read clipboard: {err}') + raise + if current_value != last_value: + response_text = current_value + break + + completion = litellm.ModelResponse( + id=f'chatcmpl-{uuid.uuid4()}', + choices=[ + litellm.Choices( + index=0, + finish_reason='stop', + message=litellm.Message(role='assistant', content=response_text), + ) + ], + created=int(time.time()), + model=self.name, + usage={'prompt_tokens': 0, 'completion_tokens': 0, 'total_tokens': 0}, + ) + + kwargs = dict(model=self.name, messages=messages, stream=False) + hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) + + return hash_object, completion + async def simple_send_with_retries(self, messages, max_tokens=None): from aider.exceptions import LiteLLMExceptions @@ -1201,6 +1329,9 @@ async def sanity_check_models(io, main_model): async def sanity_check_model(io, model): + if getattr(model, 'copy_paste_instead_of_api', False): + return False + show = False if model.missing_keys: From e14b1b785f75a96e3517e0c9c55d1fbfd80eaec3 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 09:40:02 -0600 Subject: [PATCH 02/34] feat: add CopyPasteCoder and remove legacy copy-paste path --- aider/coders/__init__.py | 2 + aider/coders/base_coder.py | 6 ++ aider/coders/copypaste_coder.py | 146 ++++++++++++++++++++++++++++++++ aider/models.py | 98 --------------------- 4 files changed, 154 insertions(+), 98 deletions(-) create mode 100644 aider/coders/copypaste_coder.py diff --git a/aider/coders/__init__.py b/aider/coders/__init__.py index 
ebe4a47dd14..bbe3e1dd15f 100644 --- a/aider/coders/__init__.py +++ b/aider/coders/__init__.py @@ -3,6 +3,7 @@ from .ask_coder import AskCoder from .base_coder import Coder from .context_coder import ContextCoder +from .copypaste_coder import CopyPasteCoder from .editblock_coder import EditBlockCoder from .editblock_fenced_coder import EditBlockFencedCoder from .editor_diff_fenced_coder import EditorDiffFencedCoder @@ -33,4 +34,5 @@ EditorDiffFencedCoder, ContextCoder, AgentCoder, + CopyPasteCoder, ] diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index 086a9adc920..5695268a48f 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -229,6 +229,12 @@ async def create( kwargs = use_kwargs from_coder.ok_to_warm_cache = False + if getattr(main_model, "copy_paste_instead_of_api", False): + res = coders.CopyPasteCoder(main_model, io, args=args, **kwargs) + await res.initialize_mcp_tools() + res.original_kwargs = dict(kwargs) + return res + for coder in coders.__all__: if hasattr(coder, "edit_format") and coder.edit_format == edit_format: res = coder(main_model, io, args=args, **kwargs) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py new file mode 100644 index 00000000000..36a409eb00b --- /dev/null +++ b/aider/coders/copypaste_coder.py @@ -0,0 +1,146 @@ +import hashlib +import json +import time +import uuid + +from aider.llm import litellm + +from .base_coder import Coder + + +class CopyPasteCoder(Coder): + """Coder implementation that performs clipboard-driven interactions.""" + + async def send(self, messages, model=None, functions=None, tools=None): + model = model or self.main_model + + if not getattr(model, "copy_paste_instead_of_api", False): + async for chunk in super().send( + messages, model=model, functions=functions, tools=tools + ): + yield chunk + return + + if functions: + self.io.tool_warning("copy/paste mode ignores function call requests.") + if tools: + 
self.io.tool_warning("copy/paste mode ignores tool call requests.") + + self.got_reasoning_content = False + self.ended_reasoning_content = False + + self._streaming_buffer_length = 0 + self.io.reset_streaming_response() + + self.partial_response_content = "" + self.partial_response_reasoning_content = "" + self.partial_response_chunks = [] + self.partial_response_tool_calls = [] + self.partial_response_function_call = dict() + + completion = None + + try: + hash_object, completion = self.copy_paste_completion(messages, model) + self.chat_completion_call_hashes.append(hash_object.hexdigest()) + self.show_send_output(completion) + self.calculate_and_show_tokens_and_cost(messages, completion) + finally: + self.preprocess_response() + + if self.partial_response_content: + self.io.ai_output(self.partial_response_content) + elif self.partial_response_function_call: + args = self.parse_partial_args() + if args: + self.io.ai_output(json.dumps(args, indent=4)) + + def copy_paste_completion(self, messages, model): + try: + import pyperclip + except ImportError: # pragma: no cover - import error path + self.io.tool_error("copy/paste mode requires the pyperclip package.") + self.io.tool_output("Install it with: pip install pyperclip") + raise + + def content_to_text(content): + if not content: + return "" + if isinstance(content, str): + return content + if isinstance(content, list): + parts = [] + for part in content: + if isinstance(part, dict): + text = part.get("text") + if isinstance(text, str): + parts.append(text) + elif isinstance(part, str): + parts.append(part) + return "".join(parts) + if isinstance(content, dict): + text = content.get("text") + if isinstance(text, str): + return text + return "" + return str(content) + + lines = [] + for message in messages: + text_content = content_to_text(message.get("content")) + if not text_content: + continue + role = message.get("role") + if role: + lines.append(f"{role.upper()}:\n{text_content}") + else: + 
lines.append(text_content) + + prompt_text = "\n\n".join(lines).strip() + + try: + pyperclip.copy(prompt_text) + except Exception as err: # pragma: no cover - clipboard error path + self.io.tool_error(f"Unable to copy prompt to clipboard: {err}") + raise + + self.io.tool_output("Request copied to clipboard.") + self.io.tool_output("Paste it into your LLM interface, then copy the reply back.") + self.io.tool_output("Waiting for clipboard updates (Ctrl+C to cancel)...") + + try: + last_value = pyperclip.paste() + except Exception as err: # pragma: no cover - clipboard error path + self.io.tool_error(f"Unable to read clipboard: {err}") + raise + + while True: + time.sleep(0.5) + try: + current_value = pyperclip.paste() + except Exception as err: # pragma: no cover - clipboard error path + self.io.tool_error(f"Unable to read clipboard: {err}") + raise + if current_value != last_value: + response_text = current_value + break + + completion = litellm.ModelResponse( + id=f"chatcmpl-{uuid.uuid4()}", + choices=[ + litellm.Choices( + index=0, + finish_reason="stop", + message=litellm.Message(role="assistant", content=response_text), + ) + ], + created=int(time.time()), + model=model.name, + usage={"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}, + ) + + kwargs = dict(model=model.name, messages=messages, stream=False) + hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) + + return hash_object, completion + diff --git a/aider/models.py b/aider/models.py index e7ef57f8235..e0ebf2f9bd3 100644 --- a/aider/models.py +++ b/aider/models.py @@ -985,9 +985,6 @@ async def send_completion( messages = model_request_parser(self, messages) - if self.copy_paste_instead_of_api: - return self.copy_paste_completion(messages) - if self.verbose: for message in messages: msg_role = message.get("role") @@ -1094,101 +1091,6 @@ async def send_completion( return hash_object, res - def copy_paste_completion(self, messages): - try: - import pyperclip - import uuid - 
except ImportError: - if self.io: - self.io.tool_error('copy/paste mode requires the pyperclip package.') - self.io.tool_output('Install it with: pip install pyperclip') - raise - - def content_to_text(content): - if not content: - return '' - if isinstance(content, str): - return content - if isinstance(content, list): - parts = [] - for part in content: - if isinstance(part, dict): - text = part.get('text') - if isinstance(text, str): - parts.append(text) - elif isinstance(part, str): - parts.append(part) - return ''.join(parts) - if isinstance(content, dict): - text = content.get('text') - if isinstance(text, str): - return text - return '' - return str(content) - - lines = [] - for message in messages: - text_content = content_to_text(message.get('content')) - if not text_content: - continue - role = message.get('role') - if role: - lines.append(f"{role.upper()}:\n{text_content}") - else: - lines.append(text_content) - - prompt_text = "\n\n".join(lines).strip() - - try: - pyperclip.copy(prompt_text) - except Exception as err: - if self.io: - self.io.tool_error(f'Unable to copy prompt to clipboard: {err}') - raise - - if self.io: - self.io.tool_output('Request copied to clipboard.') - self.io.tool_output('Paste it into your LLM interface, then copy the reply back.') - self.io.tool_output('Waiting for clipboard updates (Ctrl+C to cancel)...') - - try: - last_value = pyperclip.paste() - except Exception as err: - if self.io: - self.io.tool_error(f'Unable to read clipboard: {err}') - raise - - while True: - time.sleep(0.5) - try: - current_value = pyperclip.paste() - except Exception as err: - if self.io: - self.io.tool_error(f'Unable to read clipboard: {err}') - raise - if current_value != last_value: - response_text = current_value - break - - completion = litellm.ModelResponse( - id=f'chatcmpl-{uuid.uuid4()}', - choices=[ - litellm.Choices( - index=0, - finish_reason='stop', - message=litellm.Message(role='assistant', content=response_text), - ) - ], - 
created=int(time.time()), - model=self.name, - usage={'prompt_tokens': 0, 'completion_tokens': 0, 'total_tokens': 0}, - ) - - kwargs = dict(model=self.name, messages=messages, stream=False) - hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) - - return hash_object, completion - async def simple_send_with_retries(self, messages, max_tokens=None): from aider.exceptions import LiteLLMExceptions From 658b5a5e5c74770bfb978017eaecf514d8e86b5c Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 10:14:41 -0600 Subject: [PATCH 03/34] refactor: switch clipboard I/O to copypaste module --- aider/coders/copypaste_coder.py | 26 ++++------- aider/copypaste.py | 83 ++++++++++++++++++++++++++------- 2 files changed, 77 insertions(+), 32 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 36a409eb00b..8d9d75dd0f4 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -57,7 +57,7 @@ async def send(self, messages, model=None, functions=None, tools=None): def copy_paste_completion(self, messages, model): try: - import pyperclip + from aider import copypaste except ImportError: # pragma: no cover - import error path self.io.tool_error("copy/paste mode requires the pyperclip package.") self.io.tool_output("Install it with: pip install pyperclip") @@ -99,8 +99,8 @@ def content_to_text(content): prompt_text = "\n\n".join(lines).strip() try: - pyperclip.copy(prompt_text) - except Exception as err: # pragma: no cover - clipboard error path + copypaste.copy_to_clipboard(prompt_text) + except copypaste.ClipboardError as err: # pragma: no cover - clipboard error path self.io.tool_error(f"Unable to copy prompt to clipboard: {err}") raise @@ -109,21 +109,16 @@ def content_to_text(content): self.io.tool_output("Waiting for clipboard updates (Ctrl+C to cancel)...") try: - last_value = pyperclip.paste() - except Exception as err: # pragma: no cover - clipboard error path + last_value = 
copypaste.read_clipboard() + except copypaste.ClipboardError as err: # pragma: no cover - clipboard error path self.io.tool_error(f"Unable to read clipboard: {err}") raise - while True: - time.sleep(0.5) - try: - current_value = pyperclip.paste() - except Exception as err: # pragma: no cover - clipboard error path - self.io.tool_error(f"Unable to read clipboard: {err}") - raise - if current_value != last_value: - response_text = current_value - break + try: + response_text = copypaste.wait_for_clipboard_change(initial=last_value) + except copypaste.ClipboardError as err: # pragma: no cover - clipboard error path + self.io.tool_error(f"Unable to read clipboard: {err}") + raise completion = litellm.ModelResponse( id=f"chatcmpl-{uuid.uuid4()}", @@ -143,4 +138,3 @@ def content_to_text(content): hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) return hash_object, completion - diff --git a/aider/copypaste.py b/aider/copypaste.py index c8dfbe378d0..6f241f313ec 100644 --- a/aider/copypaste.py +++ b/aider/copypaste.py @@ -4,8 +4,50 @@ import pyperclip +class ClipboardError(Exception): + """Raised when clipboard operations fail.""" + + +class ClipboardStopped(Exception): + """Raised when clipboard monitoring stops before a change occurs.""" + + +def copy_to_clipboard(text): + """Copy text to the system clipboard.""" + try: + pyperclip.copy(text) + except Exception as err: # pragma: no cover - system clipboard errors + raise ClipboardError(err) from err + + +def read_clipboard(): + """Read text from the system clipboard.""" + try: + return pyperclip.paste() + except Exception as err: # pragma: no cover - system clipboard errors + raise ClipboardError(err) from err + + +def wait_for_clipboard_change(initial=None, poll_interval=0.5, stop_event=None): + """Block until the clipboard value changes and return the new contents.""" + last_value = initial + if last_value is None: + last_value = read_clipboard() + + while True: + current = read_clipboard() + if 
current != last_value: + return current + + if stop_event: + if stop_event.wait(poll_interval): + raise ClipboardStopped() + else: + time.sleep(poll_interval) + + class ClipboardWatcher: - """Watches clipboard for changes and updates IO placeholder""" + """Watches clipboard for changes and updates IO placeholder.""" def __init__(self, io, verbose=False): self.io = io @@ -16,34 +58,43 @@ def __init__(self, io, verbose=False): self.io.clipboard_watcher = self def start(self): - """Start watching clipboard for changes""" + """Start watching clipboard for changes.""" self.stop_event = threading.Event() - self.last_clipboard = pyperclip.paste() + self.last_clipboard = read_clipboard() def watch_clipboard(): while not self.stop_event.is_set(): try: - current = pyperclip.paste() - if current != self.last_clipboard: - self.last_clipboard = current - self.io.interrupt_input() - self.io.placeholder = current - if len(current.splitlines()) > 1: - self.io.placeholder = "\n" + self.io.placeholder + "\n" - - time.sleep(0.5) - except Exception as e: + current = wait_for_clipboard_change( + initial=self.last_clipboard, + stop_event=self.stop_event, + ) + except ClipboardStopped: + break + except ClipboardError as err: if self.verbose: from aider.dump import dump - dump(f"Clipboard watcher error: {e}") + dump(f"Clipboard watcher error: {err}") continue + except Exception as err: # pragma: no cover - unexpected errors + if self.verbose: + from aider.dump import dump + + dump(f"Clipboard watcher unexpected error: {err}") + continue + + self.last_clipboard = current + self.io.interrupt_input() + self.io.placeholder = current + if len(current.splitlines()) > 1: + self.io.placeholder = "\n" + self.io.placeholder + "\n" self.watcher_thread = threading.Thread(target=watch_clipboard, daemon=True) self.watcher_thread.start() def stop(self): - """Stop watching clipboard for changes""" + """Stop watching clipboard for changes.""" if self.stop_event: self.stop_event.set() if 
self.watcher_thread: @@ -53,7 +104,7 @@ def stop(self): def main(): - """Example usage of the clipboard watcher""" + """Example usage of the clipboard watcher.""" from aider.io import InputOutput io = InputOutput() From 97ed88018d5b35d2386607ced48f7532c963d287 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 10:31:10 -0600 Subject: [PATCH 04/34] feat: add local token counting to CopyPasteCoder and fill usage Co-authored-by: aider-ce (openai/gpt-5) --- aider/coders/copypaste_coder.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 8d9d75dd0f4..dea7fd3d74f 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -1,5 +1,6 @@ import hashlib import json +import math import time import uuid @@ -120,6 +121,22 @@ def content_to_text(content): self.io.tool_error(f"Unable to read clipboard: {err}") raise + # Estimate tokens locally using the model's tokenizer; fallback to heuristic. 
+ def _safe_token_count(text): + if not text: + return 0 + try: + count = model.token_count(text) + if isinstance(count, int) and count >= 0: + return count + except Exception: + pass + return int(math.ceil(len(text) / 4)) + + prompt_tokens = _safe_token_count(prompt_text) + completion_tokens = _safe_token_count(response_text) + total_tokens = prompt_tokens + completion_tokens + completion = litellm.ModelResponse( id=f"chatcmpl-{uuid.uuid4()}", choices=[ @@ -131,7 +148,11 @@ def content_to_text(content): ], created=int(time.time()), model=model.name, - usage={"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}, + usage={ + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens, + "total_tokens": total_tokens, + }, ) kwargs = dict(model=model.name, messages=messages, stream=False) From 4e7fffe49872f965bd2c1adff8603474d195f81d Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 10:38:11 -0600 Subject: [PATCH 05/34] fix: map LiteLLM token count exceptions to user-friendly warnings --- aider/coders/copypaste_coder.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index dea7fd3d74f..ad004747490 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -5,6 +5,7 @@ import uuid from aider.llm import litellm +from aider.exceptions import LiteLLMExceptions from .base_coder import Coder @@ -129,8 +130,17 @@ def _safe_token_count(text): count = model.token_count(text) if isinstance(count, int) and count >= 0: return count - except Exception: - pass + except Exception as ex: + # Try to map known LiteLLM exceptions to user-friendly messages, then fallback. + try: + ex_info = LiteLLMExceptions().get_ex_info(ex) + if ex_info and ex_info.description: + self.io.tool_warning( + f"Token count failed: {ex_info.description} Falling back to heuristic." 
+ ) + except Exception: + # Avoid masking the original issue during error mapping. + pass return int(math.ceil(len(text) / 4)) prompt_tokens = _safe_token_count(prompt_text) From 196d7f313430826515764b0e1e89fbeca0bd78f1 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 10:42:26 -0600 Subject: [PATCH 06/34] refactor: remove unused streaming-related fields in CopyPasteCoder Co-authored-by: aider-ce (openai/gpt-5) --- aider/coders/copypaste_coder.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index ad004747490..0f2b3406314 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -28,19 +28,9 @@ async def send(self, messages, model=None, functions=None, tools=None): if tools: self.io.tool_warning("copy/paste mode ignores tool call requests.") - self.got_reasoning_content = False - self.ended_reasoning_content = False - - self._streaming_buffer_length = 0 self.io.reset_streaming_response() self.partial_response_content = "" - self.partial_response_reasoning_content = "" - self.partial_response_chunks = [] - self.partial_response_tool_calls = [] - self.partial_response_function_call = dict() - - completion = None try: hash_object, completion = self.copy_paste_completion(messages, model) @@ -52,10 +42,6 @@ async def send(self, messages, model=None, functions=None, tools=None): if self.partial_response_content: self.io.ai_output(self.partial_response_content) - elif self.partial_response_function_call: - args = self.parse_partial_args() - if args: - self.io.ai_output(json.dumps(args, indent=4)) def copy_paste_completion(self, messages, model): try: From 05a906bd582798859b02381006fef691e84e070d Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:07:14 -0600 Subject: [PATCH 07/34] fix: initialize CopyPasteCoder.gpt_prompts from selected edit_format Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py 
| 70 ++++++++++++++++++++++++++++++--- 1 file changed, 65 insertions(+), 5 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 0f2b3406314..a19f8ddd9b3 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -4,22 +4,82 @@ import time import uuid -from aider.llm import litellm from aider.exceptions import LiteLLMExceptions +from aider.llm import litellm from .base_coder import Coder class CopyPasteCoder(Coder): - """Coder implementation that performs clipboard-driven interactions.""" + """Coder implementation that performs clipboard-driven interactions. + + This coder swaps the transport mechanism (clipboard vs API) but must remain compatible with the + base ``Coder`` interface. In particular, many base methods assume ``self.gpt_prompts`` exists. + + We therefore mirror the prompt pack from the coder that matches the currently selected + ``edit_format``. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Ensure CopyPasteCoder always has a prompt pack. + # We mirror prompts from the coder that matches the active edit format. + self._init_prompts_from_selected_edit_format() + + def _init_prompts_from_selected_edit_format(self): + """ + Initialize ``self.gpt_prompts`` (and related prompt-dependent metadata) using the coder + class matching the currently selected edit format. + + This prevents AttributeError crashes when base ``Coder`` code assumes ``self.gpt_prompts`` + exists (eg during message formatting, announcements, cancellation/cleanup paths, etc). + """ + # Determine the selected edit_format the same way Coder.create() does. 
+ selected_edit_format = None + if getattr(self, "args", None) is not None and getattr(self.args, "edit_format", None): + selected_edit_format = self.args.edit_format + else: + selected_edit_format = getattr(self.main_model, "edit_format", None) + + # "code" is treated like None in Coder.create() + if selected_edit_format == "code": + selected_edit_format = None + + # If no edit format is selected, fall back to model default. + if selected_edit_format is None: + selected_edit_format = getattr(self.main_model, "edit_format", None) + + # Find the coder class that would have been selected for this edit_format. + try: + import aider.coders as coders + except Exception: + coders = None + + target_coder_class = None + if coders is not None: + for coder_cls in getattr(coders, "__all__", []): + if hasattr(coder_cls, "edit_format") and coder_cls.edit_format == selected_edit_format: + target_coder_class = coder_cls + break + + # Mirror prompt pack + edit_format where available. + if target_coder_class is not None and hasattr(target_coder_class, "gpt_prompts"): + self.gpt_prompts = target_coder_class.gpt_prompts + # Keep announcements/formatting consistent with the selected coder. + self.edit_format = getattr(target_coder_class, "edit_format", self.edit_format) + return + + # Last-resort fallback: avoid crashing if we can't determine the prompts. + # Prefer keeping any existing gpt_prompts (if one was set elsewhere). 
+ if not hasattr(self, "gpt_prompts"): + self.gpt_prompts = None async def send(self, messages, model=None, functions=None, tools=None): model = model or self.main_model if not getattr(model, "copy_paste_instead_of_api", False): - async for chunk in super().send( - messages, model=model, functions=functions, tools=tools - ): + async for chunk in super().send(messages, model=model, functions=functions, tools=tools): yield chunk return From ec4fb44d650d7f1fdb7f10e9298a4e10aba1f8bb Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:13:34 -0600 Subject: [PATCH 08/34] fix: initialize streaming attributes in CopyPasteCoder Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index a19f8ddd9b3..cccc9bea2ca 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -90,7 +90,11 @@ async def send(self, messages, model=None, functions=None, tools=None): self.io.reset_streaming_response() + # Base Coder methods (eg show_send_output) expect these streaming attributes + # to always exist, even when we bypass the normal API streaming path. 
self.partial_response_content = "" + self.partial_response_function_call = None + self.partial_response_tool_calls = None try: hash_object, completion = self.copy_paste_completion(messages, model) From 66810550f60ba22c22136473f17e8676b0fd3e85 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:15:02 -0600 Subject: [PATCH 09/34] refactor: add docstrings and nosec hint in copypaste_coder.py Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index cccc9bea2ca..2689e99ab8d 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -28,9 +28,7 @@ def __init__(self, *args, **kwargs): self._init_prompts_from_selected_edit_format() def _init_prompts_from_selected_edit_format(self): - """ - Initialize ``self.gpt_prompts`` (and related prompt-dependent metadata) using the coder - class matching the currently selected edit format. + """Initialize ``self.gpt_prompts`` based on the currently selected edit format. This prevents AttributeError crashes when base ``Coder`` code assumes ``self.gpt_prompts`` exists (eg during message formatting, announcements, cancellation/cleanup paths, etc). @@ -116,6 +114,7 @@ def copy_paste_completion(self, messages, model): raise def content_to_text(content): + """Extract text from the various content formats Aider/LLMs can produce.""" if not content: return "" if isinstance(content, str): @@ -174,6 +173,7 @@ def content_to_text(content): # Estimate tokens locally using the model's tokenizer; fallback to heuristic. 
def _safe_token_count(text): + """Return token count via the model tokenizer, falling back to a heuristic.""" if not text: return 0 try: @@ -181,7 +181,7 @@ def _safe_token_count(text): if isinstance(count, int) and count >= 0: return count except Exception as ex: - # Try to map known LiteLLM exceptions to user-friendly messages, then fallback. + # Try to map known LiteLLM exceptions to user-friendly messages, then fall back. try: ex_info = LiteLLMExceptions().get_ex_info(ex) if ex_info and ex_info.description: @@ -216,6 +216,6 @@ def _safe_token_count(text): ) kwargs = dict(model=model.name, messages=messages, stream=False) - hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) + hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) # nosec B324 return hash_object, completion From 83f28c227dc246bf1b6a6feeb568d6551baa7dc8 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:21:36 -0600 Subject: [PATCH 10/34] fix: initialize partial_response_tool_calls to [] and update comments Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 2689e99ab8d..2cb0400e2ce 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -88,11 +88,12 @@ async def send(self, messages, model=None, functions=None, tools=None): self.io.reset_streaming_response() - # Base Coder methods (eg show_send_output) expect these streaming attributes - # to always exist, even when we bypass the normal API streaming path. + # Base Coder methods (eg show_send_output/preprocess_response) expect these streaming + # attributes to always exist, even when we bypass the normal API streaming path. 
self.partial_response_content = "" self.partial_response_function_call = None - self.partial_response_tool_calls = None + # preprocess_response() does len(self.partial_response_tool_calls), so it must not be None. + self.partial_response_tool_calls = [] try: hash_object, completion = self.copy_paste_completion(messages, model) @@ -219,3 +220,6 @@ def _safe_token_count(text): hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) # nosec B324 return hash_object, completion +```bash +pytest +``` From 803e4aad41db17dad5180ad78961c91be669de9a Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:22:17 -0600 Subject: [PATCH 11/34] fix: remove stray code fence in copypaste_coder.py Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 2cb0400e2ce..ce7fa26c7f9 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -221,5 +221,5 @@ def _safe_token_count(text): return hash_object, completion ```bash -pytest +C:\Users\ccn\AppData\Roaming\uv\tools\aider-ce\Scripts\python.exe -m flake8 --select=E9,F821,F823,F831,F406,F407,F701,F702,F704,F706 --show-source --isolated aider\coders\copypaste_coder.py ``` From e7022952ba7ecab914b19b4948b7c822bd13c594 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:22:38 -0600 Subject: [PATCH 12/34] fix: remove stray bash fence and flake8 command from copypaste_coder.py Co-authored-by: aider-ce (openai/gpt-5.2) --- aider/coders/copypaste_coder.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index ce7fa26c7f9..3d4f0c40f11 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -218,8 +218,4 @@ def _safe_token_count(text): kwargs = dict(model=model.name, messages=messages, stream=False) hash_object = 
hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode()) # nosec B324 - return hash_object, completion -```bash -C:\Users\ccn\AppData\Roaming\uv\tools\aider-ce\Scripts\python.exe -m flake8 --select=E9,F821,F823,F831,F406,F407,F701,F702,F704,F706 --show-source --isolated aider\coders\copypaste_coder.py -``` From 22789c4b6e11093f0e4ee0482e8a5a1f40ed0004 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 11:59:21 -0600 Subject: [PATCH 13/34] Tests for copypaste coder --- tests/basic/test_main.py | 23 +++- tests/coders/test_copypaste_coder.py | 166 +++++++++++++++++++++++++++ 2 files changed, 188 insertions(+), 1 deletion(-) create mode 100644 tests/coders/test_copypaste_coder.py diff --git a/tests/basic/test_main.py b/tests/basic/test_main.py index 5cd128aba8a..6f4bc003266 100644 --- a/tests/basic/test_main.py +++ b/tests/basic/test_main.py @@ -11,7 +11,7 @@ from prompt_toolkit.input import DummyInput from prompt_toolkit.output import DummyOutput -from aider.coders import Coder +from aider.coders import Coder, CopyPasteCoder from aider.dump import dump # noqa: F401 from aider.io import InputOutput from aider.main import check_gitignore, load_dotenv_files, main, setup_git @@ -89,6 +89,27 @@ async def test_main_with_subdir_repo_fnames(self, _): self.assertTrue((subdir / "foo.txt").exists()) self.assertTrue((subdir / "bar.txt").exists()) + async def test_main_copy_paste_model_overrides(self): + overrides = json.dumps({"gpt-4o": {"fast": {"temperature": 0.42}}}) + coder = await main( + [ + "--no-git", + "--exit", + "--yes", + "--model", + "cp:gpt-4o:fast", + "--model-overrides", + overrides, + ], + input=DummyInput(), + output=DummyOutput(), + return_coder=True, + ) + + self.assertIsInstance(coder, CopyPasteCoder) + self.assertTrue(coder.main_model.copy_paste_instead_of_api) + self.assertEqual(coder.main_model.override_kwargs, {"temperature": 0.42}) + async def test_main_with_git_config_yml(self): make_repo() diff --git 
a/tests/coders/test_copypaste_coder.py b/tests/coders/test_copypaste_coder.py new file mode 100644 index 00000000000..7db909943c8 --- /dev/null +++ b/tests/coders/test_copypaste_coder.py @@ -0,0 +1,166 @@ +import hashlib +import json +from types import SimpleNamespace +from unittest.mock import MagicMock, call, patch + +import pytest + +from aider.coders.copypaste_coder import CopyPasteCoder +from aider.coders.editblock_coder import EditBlockCoder + + +def test_init_prompts_uses_selected_edit_format(): + coder = CopyPasteCoder.__new__(CopyPasteCoder) + coder.args = SimpleNamespace(edit_format="diff") + coder.main_model = SimpleNamespace(edit_format=None) + coder.edit_format = None + coder.gpt_prompts = None + + coder._init_prompts_from_selected_edit_format() + + assert coder.gpt_prompts is EditBlockCoder.gpt_prompts + assert coder.edit_format == EditBlockCoder.edit_format + + +def test_init_prompts_preserves_existing_when_no_match(monkeypatch): + coder = CopyPasteCoder.__new__(CopyPasteCoder) + coder.args = SimpleNamespace(edit_format="custom-format") + coder.main_model = SimpleNamespace(edit_format=None) + coder.edit_format = "original-format" + coder.gpt_prompts = "original-prompts" + + import aider.coders as coders + + monkeypatch.setattr(coders, "__all__", [], raising=False) + + coder._init_prompts_from_selected_edit_format() + + assert coder.gpt_prompts == "original-prompts" + assert coder.edit_format == "original-format" + + +@pytest.mark.asyncio +async def test_send_uses_copy_paste_flow(monkeypatch): + coder = CopyPasteCoder.__new__(CopyPasteCoder) + + io = MagicMock() + coder.io = io + coder.stream = False + coder.partial_response_content = "" + coder.partial_response_tool_calls = [] + coder.partial_response_function_call = None + coder.chat_completion_call_hashes = [] + coder.show_send_output = MagicMock() + coder.calculate_and_show_tokens_and_cost = MagicMock() + + def fake_preprocess_response(): + coder.partial_response_content = "final-response" + + 
coder.preprocess_response = fake_preprocess_response + + class ModelStub: + copy_paste_instead_of_api = True + name = "cp:gpt-4o" + + @staticmethod + def token_count(text): + return len(text) + + coder.main_model = ModelStub() + + hash_obj = MagicMock() + hash_obj.hexdigest.return_value = "hash" + completion = MagicMock() + + with patch.object( + CopyPasteCoder, "copy_paste_completion", return_value=(hash_obj, completion) + ) as mock_completion: + messages = [{"role": "user", "content": "Hello"}] + chunks = [chunk async for chunk in coder.send(messages)] + + assert chunks == [] + mock_completion.assert_called_once_with(messages, coder.main_model) + coder.show_send_output.assert_called_once_with(completion) + coder.calculate_and_show_tokens_and_cost.assert_called_once_with(messages, completion) + assert coder.chat_completion_call_hashes == ["hash"] + coder.io.ai_output.assert_called_once_with("final-response") + + +def test_copy_paste_completion_interacts_with_clipboard(monkeypatch): + coder = CopyPasteCoder.__new__(CopyPasteCoder) + + io = MagicMock() + coder.io = io + + import aider.copypaste as copypaste + + copy_mock = MagicMock() + read_mock = MagicMock(return_value="initial value") + wait_mock = MagicMock(return_value="assistant reply") + + monkeypatch.setattr(copypaste, "copy_to_clipboard", copy_mock) + monkeypatch.setattr(copypaste, "read_clipboard", read_mock) + monkeypatch.setattr(copypaste, "wait_for_clipboard_change", wait_mock) + + class DummyMessage: + def __init__(self, **kwargs): + self.data = kwargs + + class DummyChoices: + def __init__(self, **kwargs): + self.data = kwargs + + class DummyModelResponse: + def __init__(self, **kwargs): + self.kwargs = kwargs + + monkeypatch.setattr("aider.coders.copypaste_coder.litellm.Message", DummyMessage) + monkeypatch.setattr("aider.coders.copypaste_coder.litellm.Choices", DummyChoices) + monkeypatch.setattr( + "aider.coders.copypaste_coder.litellm.ModelResponse", DummyModelResponse + ) + + class ModelStub: + 
name = "cp:gpt-4o" + + @staticmethod + def token_count(text): + return len(text) + + model = ModelStub() + + messages = [ + {"role": "system", "content": "keep calm"}, + {"role": "user", "content": [{"text": "Hello"}, {"text": "!"}]}, + {"role": "assistant", "content": [{"text": "Prior"}, {"text": " reply"}]}, + ] + + hash_obj, completion = coder.copy_paste_completion(messages, model) + + expected_prompt = "SYSTEM:\nkeep calm\n\nUSER:\nHello!\n\nASSISTANT:\nPrior reply" + copy_mock.assert_called_once_with(expected_prompt) + read_mock.assert_called_once() + wait_mock.assert_called_once_with(initial="initial value") + + io.tool_output.assert_has_calls( + [ + call("Request copied to clipboard."), + call("Paste it into your LLM interface, then copy the reply back."), + call("Waiting for clipboard updates (Ctrl+C to cancel)..."), + ] + ) + + expected_hash = hashlib.sha1( + json.dumps({"model": model.name, "messages": messages, "stream": False}, sort_keys=True).encode() + ).hexdigest() + assert hash_obj.hexdigest() == expected_hash + + usage = completion.kwargs["usage"] + assert usage["prompt_tokens"] == len(expected_prompt) + assert usage["completion_tokens"] == len("assistant reply") + assert usage["total_tokens"] == len(expected_prompt) + len("assistant reply") + + choices = completion.kwargs["choices"] + assert len(choices) == 1 + choice_payload = choices[0].data + assert choice_payload["message"].data["content"] == "assistant reply" From 45aef2db9fd9269d25f480a44f5f8fbdb3943fbf Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 15:42:13 -0500 Subject: [PATCH 14/34] Bump Version --- aider/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aider/__init__.py b/aider/__init__.py index ce04b1d2e50..478a2f82558 100644 --- a/aider/__init__.py +++ b/aider/__init__.py @@ -1,6 +1,6 @@ from packaging import version -__version__ = "0.90.0.dev" +__version__ = "0.90.4.dev" safe_version = __version__ try: From 
85b854b3b600724384f90efb3e25ac176f3da621 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 17:42:16 -0500 Subject: [PATCH 15/34] #280: Commit files on finished tool call in agent mode --- aider/coders/agent_coder.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/aider/coders/agent_coder.py b/aider/coders/agent_coder.py index 36a968df182..afe877ccca4 100644 --- a/aider/coders/agent_coder.py +++ b/aider/coders/agent_coder.py @@ -1202,6 +1202,9 @@ async def reply_completed(self): ) = await self._process_tool_commands(content) if self.agent_finished: + if len(self.files_edited_by_tools): + await self.auto_commit(self.files_edited_by_tools) + self.tool_usage_history = [] return True From e17a7070efaae098a138cda1bb70f457960daac3 Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 16:51:10 -0600 Subject: [PATCH 16/34] refactor: swap copy_paste_instead_of_api for copy_paste_mode --- aider/coders/base_coder.py | 11 ++++++++--- aider/coders/copypaste_coder.py | 4 ++-- aider/{ => helpers}/copypaste.py | 0 aider/main.py | 5 ++++- aider/models.py | 19 +++++++++++++------ aider/website/_data/blame.yml | 2 +- tests/basic/test_main.py | 20 +++++++++++++++++++- tests/coders/test_copypaste_coder.py | 7 +++++-- 8 files changed, 52 insertions(+), 16 deletions(-) rename aider/{ => helpers}/copypaste.py (100%) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index 43fa9986c42..f312e0eecc5 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -229,7 +229,9 @@ async def create( kwargs = use_kwargs from_coder.ok_to_warm_cache = False - if getattr(main_model, "copy_paste_instead_of_api", False): + if getattr(main_model, "copy_paste_mode", False) and getattr( + main_model, "copy_paste_transport", "api" + ) != "api": res = coders.CopyPasteCoder(main_model, io, args=args, **kwargs) await res.initialize_mcp_tools() res.original_kwargs = dict(kwargs) @@ -385,6 +387,9 @@ def __init__( self.io = io self.io.coder 
= weakref.ref(self) + self.manual_copy_paste = getattr(main_model, "copy_paste_transport", "api") != "api" + self.copy_paste_mode = getattr(main_model, "copy_paste_mode", False) or auto_copy_context + self.shell_commands = [] self.partial_response_tool_calls = [] @@ -405,7 +410,7 @@ def __init__( self.main_model.reasoning_tag if self.main_model.reasoning_tag else REASONING_TAG ) - self.stream = stream and main_model.streaming and not main_model.copy_paste_instead_of_api + self.stream = stream and main_model.streaming and not self.manual_copy_paste if cache_prompts and self.main_model.cache_control: self.add_cache_headers = True @@ -587,7 +592,7 @@ def get_announcements(self): output += ", prompt cache" if main_model.info.get("supports_assistant_prefill"): output += ", infinite output" - if main_model.copy_paste_instead_of_api: + if self.copy_paste_mode: output += ", copy/paste mode" lines.append(output) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 3d4f0c40f11..61b332ff81d 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -76,7 +76,7 @@ def _init_prompts_from_selected_edit_format(self): async def send(self, messages, model=None, functions=None, tools=None): model = model or self.main_model - if not getattr(model, "copy_paste_instead_of_api", False): + if getattr(model, "copy_paste_transport", "api") == "api": async for chunk in super().send(messages, model=model, functions=functions, tools=tools): yield chunk return @@ -108,7 +108,7 @@ async def send(self, messages, model=None, functions=None, tools=None): def copy_paste_completion(self, messages, model): try: - from aider import copypaste + from aider.helpers import copypaste except ImportError: # pragma: no cover - import error path self.io.tool_error("copy/paste mode requires the pyperclip package.") self.io.tool_output("Install it with: pip install pyperclip") diff --git a/aider/copypaste.py b/aider/helpers/copypaste.py similarity index 
100% rename from aider/copypaste.py rename to aider/helpers/copypaste.py diff --git a/aider/main.py b/aider/main.py index 2f7998b9d56..658def04474 100644 --- a/aider/main.py +++ b/aider/main.py @@ -41,7 +41,7 @@ from aider.coders import Coder from aider.coders.base_coder import UnknownEditFormat from aider.commands import Commands, SwitchCoder -from aider.copypaste import ClipboardWatcher +from aider.helpers.copypaste import ClipboardWatcher from aider.deprecated import handle_deprecated_model_args from aider.format_settings import format_settings, scrub_sensitive_info from aider.helpers.file_searcher import generate_search_path_list @@ -1078,6 +1078,9 @@ def apply_model_overrides(model_name): override_kwargs=main_model_overrides, ) + if args.copy_paste and main_model.copy_paste_transport == "api": + main_model.enable_copy_paste_mode() + # Check if deprecated remove_reasoning is set if main_model.remove_reasoning is not None: io.tool_warning( diff --git a/aider/models.py b/aider/models.py index e0ebf2f9bd3..60338b00281 100644 --- a/aider/models.py +++ b/aider/models.py @@ -331,9 +331,12 @@ def __init__( self.verbose = verbose self.override_kwargs = override_kwargs or {} - self.copy_paste_instead_of_api = provided_model.startswith(COPY_PASTE_PREFIX) - if self.copy_paste_instead_of_api: + self.copy_paste_mode = False + self.copy_paste_transport = "api" + + if provided_model.startswith(COPY_PASTE_PREFIX): model = provided_model.removeprefix(COPY_PASTE_PREFIX) + self.enable_copy_paste_mode(transport="clipboard") else: model = provided_model @@ -370,7 +373,7 @@ def __init__( else: self.get_editor_model(editor_model, editor_edit_format) - if self.copy_paste_instead_of_api: + if self.copy_paste_transport != "api": self.streaming = False def get_model_info(self, model): @@ -608,6 +611,10 @@ def apply_generic_model_settings(self, model): def __str__(self): return self.name + def enable_copy_paste_mode(self, *, transport="api"): + self.copy_paste_mode = True + 
self.copy_paste_transport = transport + def get_weak_model(self, provided_weak_model): # If provided_weak_model is False, set weak_model to self if provided_weak_model is False: @@ -615,7 +622,7 @@ def get_weak_model(self, provided_weak_model): self.weak_model_name = None return - if self.copy_paste_instead_of_api: + if self.copy_paste_transport != "api": self.weak_model = self self.weak_model_name = None return @@ -649,7 +656,7 @@ def commit_message_models(self): return [self.weak_model, self] def get_editor_model(self, provided_editor_model, editor_edit_format): - if self.copy_paste_instead_of_api: + if self.copy_paste_transport != "api": provided_editor_model = False self.editor_model_name = self.name self.editor_model = self @@ -1231,7 +1238,7 @@ async def sanity_check_models(io, main_model): async def sanity_check_model(io, model): - if getattr(model, 'copy_paste_instead_of_api', False): + if getattr(model, "copy_paste_transport", "api") != "api": return False show = False diff --git a/aider/website/_data/blame.yml b/aider/website/_data/blame.yml index 2d302504e6e..78a1f5a9db2 100644 --- a/aider/website/_data/blame.yml +++ b/aider/website/_data/blame.yml @@ -3383,7 +3383,7 @@ aider/commands.py: Paul Gauthier: 28 Paul Gauthier (aider): 21 - aider/copypaste.py: + aider/helpers/copypaste.py: Paul Gauthier: 5 Paul Gauthier (aider): 60 aider/exceptions.py: diff --git a/tests/basic/test_main.py b/tests/basic/test_main.py index 8fe63211700..7ed6564e5c3 100644 --- a/tests/basic/test_main.py +++ b/tests/basic/test_main.py @@ -107,9 +107,27 @@ async def test_main_copy_paste_model_overrides(self): ) self.assertIsInstance(coder, CopyPasteCoder) - self.assertTrue(coder.main_model.copy_paste_instead_of_api) + self.assertTrue(coder.main_model.copy_paste_mode) + self.assertEqual(coder.main_model.copy_paste_transport, "clipboard") self.assertEqual(coder.main_model.override_kwargs, {"temperature": 0.42}) + @patch("aider.main.ClipboardWatcher") + async def 
test_main_copy_paste_flag_sets_mode(self, mock_watcher): + mock_watcher.return_value = MagicMock() + + coder = await main( + ["--no-git", "--exit", "--yes", "--copy-paste"], + input=DummyInput(), + output=DummyOutput(), + return_coder=True, + ) + + self.assertNotIsInstance(coder, CopyPasteCoder) + self.assertTrue(coder.main_model.copy_paste_mode) + self.assertEqual(coder.main_model.copy_paste_transport, "api") + self.assertTrue(coder.copy_paste_mode) + self.assertFalse(coder.manual_copy_paste) + async def test_main_with_git_config_yml(self): make_repo() diff --git a/tests/coders/test_copypaste_coder.py b/tests/coders/test_copypaste_coder.py index 7db909943c8..9804e0c1300 100644 --- a/tests/coders/test_copypaste_coder.py +++ b/tests/coders/test_copypaste_coder.py @@ -59,7 +59,8 @@ def fake_preprocess_response(): coder.preprocess_response = fake_preprocess_response class ModelStub: - copy_paste_instead_of_api = True + copy_paste_mode = True + copy_paste_transport = "clipboard" name = "cp:gpt-4o" @staticmethod @@ -92,7 +93,7 @@ def test_copy_paste_completion_interacts_with_clipboard(monkeypatch): io = MagicMock() coder.io = io - import aider.copypaste as copypaste + import aider.helpers.copypaste as copypaste copy_mock = MagicMock() read_mock = MagicMock(return_value="initial value") @@ -122,6 +123,8 @@ def __init__(self, **kwargs): class ModelStub: name = "cp:gpt-4o" + copy_paste_mode = True + copy_paste_transport = "clipboard" @staticmethod def token_count(text): From c482ce56cffdfc4d9922e1eb7dd71bed1f7efbdb Mon Sep 17 00:00:00 2001 From: Chris Nestrud Date: Fri, 19 Dec 2025 16:57:01 -0600 Subject: [PATCH 17/34] refactor: align copy_paste_transport checks to clipboard value --- aider/coders/base_coder.py | 4 ++-- aider/models.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index f312e0eecc5..0ce47c76afc 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ 
-231,7 +231,7 @@ async def create( if getattr(main_model, "copy_paste_mode", False) and getattr( main_model, "copy_paste_transport", "api" - ) != "api": + ) == "clipboard": res = coders.CopyPasteCoder(main_model, io, args=args, **kwargs) await res.initialize_mcp_tools() res.original_kwargs = dict(kwargs) @@ -387,7 +387,7 @@ def __init__( self.io = io self.io.coder = weakref.ref(self) - self.manual_copy_paste = getattr(main_model, "copy_paste_transport", "api") != "api" + self.manual_copy_paste = getattr(main_model, "copy_paste_transport", "api") == "clipboard" self.copy_paste_mode = getattr(main_model, "copy_paste_mode", False) or auto_copy_context self.shell_commands = [] diff --git a/aider/models.py b/aider/models.py index 60338b00281..3e4c1514447 100644 --- a/aider/models.py +++ b/aider/models.py @@ -373,7 +373,7 @@ def __init__( else: self.get_editor_model(editor_model, editor_edit_format) - if self.copy_paste_transport != "api": + if self.copy_paste_transport == "clipboard": self.streaming = False def get_model_info(self, model): @@ -622,7 +622,7 @@ def get_weak_model(self, provided_weak_model): self.weak_model_name = None return - if self.copy_paste_transport != "api": + if self.copy_paste_transport == "clipboard": self.weak_model = self self.weak_model_name = None return @@ -656,7 +656,7 @@ def commit_message_models(self): return [self.weak_model, self] def get_editor_model(self, provided_editor_model, editor_edit_format): - if self.copy_paste_transport != "api": + if self.copy_paste_transport == "clipboard": provided_editor_model = False self.editor_model_name = self.name self.editor_model = self @@ -1238,7 +1238,7 @@ async def sanity_check_models(io, main_model): async def sanity_check_model(io, model): - if getattr(model, "copy_paste_transport", "api") != "api": + if getattr(model, "copy_paste_transport", "api") == "clipboard": return False show = False From 350ede1664e6c23a590415b6d0952346ba083a05 Mon Sep 17 00:00:00 2001 From: chrisnestrud Date: Fri, 19 
Dec 2025 17:13:49 -0600 Subject: [PATCH 18/34] Update sessions.md to have session name mirror model name --- aider/website/docs/sessions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aider/website/docs/sessions.md b/aider/website/docs/sessions.md index 1afdb45ec61..1cb38b21328 100644 --- a/aider/website/docs/sessions.md +++ b/aider/website/docs/sessions.md @@ -205,7 +205,7 @@ If a session fails to load: ``` # Save session with specific model /model gpt-5 -/save-session gpt4-session +/save-session gpt5-session # Try different model /model claude-sonnet-4.5 From ca5ad7817fa1a426c16d843df057896cbb88e9b4 Mon Sep 17 00:00:00 2001 From: James Williams Date: Fri, 19 Dec 2025 20:14:04 -0300 Subject: [PATCH 19/34] Auto-commit when finished. --- aider/coders/agent_coder.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/aider/coders/agent_coder.py b/aider/coders/agent_coder.py index 36a968df182..ee2e66e5c96 100644 --- a/aider/coders/agent_coder.py +++ b/aider/coders/agent_coder.py @@ -1203,6 +1203,8 @@ async def reply_completed(self): if self.agent_finished: self.tool_usage_history = [] + if self.files_edited_by_tools: + _ = await self.auto_commit(self.files_edited_by_tools) return True # Since we are no longer suppressing, the partial_response_content IS the final content. 
From 388b005c52392596274277279fed554eb67ea056 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 18:17:36 -0500 Subject: [PATCH 20/34] Don't display line format in tui mode, the input area already does --- aider/coders/base_coder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index f4a311878be..a0286e4699d 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -639,7 +639,7 @@ def get_announcements(self): if self.done_messages: lines.append("Restored previous conversation history.") - if self.io.multiline_mode: + if self.io.multiline_mode and not self.args.tui: lines.append("Multiline mode: Enabled. Enter inserts newline, Alt-Enter submits text") return lines From ef07f110f9d90e279609381c0375633c90e003f5 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 18:25:06 -0500 Subject: [PATCH 21/34] #281: They run AI model's but can't support the full json schema spec, shame --- aider/tools/insert_block.py | 19 ++++--------------- aider/tools/show_numbered_context.py | 9 ++++----- 2 files changed, 8 insertions(+), 20 deletions(-) diff --git a/aider/tools/insert_block.py b/aider/tools/insert_block.py index 68acc7738c3..1c27811ec89 100644 --- a/aider/tools/insert_block.py +++ b/aider/tools/insert_block.py @@ -22,7 +22,10 @@ class Tool(BaseTool): "type": "function", "function": { "name": "InsertBlock", - "description": "Insert a block of content into a file.", + "description": ( + "Insert a block of content into a file. Only use one of: after_pattern," + " before_pattern, position." 
+ ), "parameters": { "type": "object", "properties": { @@ -38,20 +41,6 @@ class Tool(BaseTool): "use_regex": {"type": "boolean", "default": False}, }, "required": ["file_path", "content"], - "oneOf": [ - { - "required": ["after_pattern"], - "not": {"required": ["before_pattern", "position"]}, - }, - { - "required": ["before_pattern"], - "not": {"required": ["after_pattern", "position"]}, - }, - { - "required": ["position"], - "not": {"required": ["after_pattern", "before_pattern"]}, - }, - ], }, }, } diff --git a/aider/tools/show_numbered_context.py b/aider/tools/show_numbered_context.py index 7bfd0d43b5b..90c61b97e71 100644 --- a/aider/tools/show_numbered_context.py +++ b/aider/tools/show_numbered_context.py @@ -15,7 +15,10 @@ class Tool(BaseTool): "type": "function", "function": { "name": "ShowNumberedContext", - "description": "Show numbered lines of context around a pattern or line number.", + "description": ( + "Show numbered lines of context around a pattern or line number. Only use one of:" + " pattern, line_number" + ), "parameters": { "type": "object", "properties": { @@ -25,10 +28,6 @@ class Tool(BaseTool): "context_lines": {"type": "integer", "default": 3}, }, "required": ["file_path"], - "oneOf": [ - {"required": ["pattern"], "not": {"required": ["line_number"]}}, - {"required": ["line_number"], "not": {"required": ["pattern"]}}, - ], }, }, } From 3ddb829f95301c0b315495574e4ac7c3c37cd9f4 Mon Sep 17 00:00:00 2001 From: James Williams Date: Fri, 19 Dec 2025 20:26:04 -0300 Subject: [PATCH 22/34] Fix git-diff tool. 
--- aider/repo.py | 7 +++++-- aider/tools/git_diff.py | 3 ++- tests/tools/test_git_diff.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 3 deletions(-) create mode 100644 tests/tools/test_git_diff.py diff --git a/aider/repo.py b/aider/repo.py index f2016075b22..8feaf18914c 100644 --- a/aider/repo.py +++ b/aider/repo.py @@ -417,14 +417,17 @@ def get_diffs(self, fnames=None): except ANY_GIT_ERROR as err: self.io.tool_error(f"Unable to diff: {err}") - def diff_commits(self, pretty, from_commit, to_commit): + def diff_commits(self, pretty, from_commit, to_commit=None): args = [] if pretty: args += ["--color"] else: args += ["--color=never"] - args += [from_commit, to_commit] + if to_commit is not None: + args += [from_commit, to_commit] + else: + args += [from_commit] diffs = self.repo.git.diff(*args, stdout_as_string=False).decode( self.io.encoding, "replace" ) diff --git a/aider/tools/git_diff.py b/aider/tools/git_diff.py index d90577853e1..27e4e73ff46 100644 --- a/aider/tools/git_diff.py +++ b/aider/tools/git_diff.py @@ -36,7 +36,8 @@ def execute(cls, coder, branch=None): try: if branch: - diff = coder.repo.diff_commits(False, branch, "HEAD") + # Diff working tree against the requested branch/commit + diff = coder.repo.diff_commits(False, branch, None) else: diff = coder.repo.diff_commits(False, "HEAD", None) diff --git a/tests/tools/test_git_diff.py b/tests/tools/test_git_diff.py new file mode 100644 index 00000000000..7924ceb9603 --- /dev/null +++ b/tests/tools/test_git_diff.py @@ -0,0 +1,29 @@ +from pathlib import Path +from types import SimpleNamespace + +import git + +from aider.io import InputOutput +from aider.repo import GitRepo +from aider.tools import git_diff +from aider.utils import GitTemporaryDirectory + + +def test_gitdiff_head_argument_includes_working_tree_changes(): + with GitTemporaryDirectory(): + repo = git.Repo() + fname = Path("example.txt") + fname.write_text("original\n") + repo.git.add(str(fname)) + 
repo.config_writer().set_value("commit", "gpgsign", "false").release() + repo.git.commit("-m", "initial") + + fname.write_text("updated\n") + + io = InputOutput() + git_repo = GitRepo(io, None, ".") + coder = SimpleNamespace(repo=git_repo, io=io) + + result = git_diff.Tool.execute(coder, branch="HEAD") + + assert "updated" in result From 40917cc2b2c1502d8a1b058b6252307d552b5cda Mon Sep 17 00:00:00 2001 From: James Williams Date: Fri, 19 Dec 2025 20:31:08 -0300 Subject: [PATCH 23/34] Avoid exception when detached. --- aider/tools/git_branch.py | 11 ++++++-- tests/tools/test_git_branch.py | 51 ++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 3 deletions(-) create mode 100644 tests/tools/test_git_branch.py diff --git a/aider/tools/git_branch.py b/aider/tools/git_branch.py index 4f3bd72cb68..a04c2e715d4 100644 --- a/aider/tools/git_branch.py +++ b/aider/tools/git_branch.py @@ -113,12 +113,17 @@ def execute( args.extend(["--format", format]) # Execute git command - result = coder.repo.repo.git.branch(*args) + result = coder.repo.repo.git.branch(*args).strip() # If no result and show_current was used, get current branch directly if not result and show_current: - current_branch = coder.repo.repo.active_branch.name - return current_branch + try: + head = coder.repo.repo.head + if head.is_detached: + return "HEAD (detached)" + return coder.repo.repo.active_branch.name + except ANY_GIT_ERROR: + return "No current branch found." return result if result else "No branches found matching the criteria." 
diff --git a/tests/tools/test_git_branch.py b/tests/tools/test_git_branch.py new file mode 100644 index 00000000000..912e4d5cbd9 --- /dev/null +++ b/tests/tools/test_git_branch.py @@ -0,0 +1,51 @@ +from pathlib import Path +from types import SimpleNamespace + +import git + +from aider.io import InputOutput +from aider.repo import GitRepo +from aider.tools import git_branch +from aider.utils import GitTemporaryDirectory + + +def _make_repo(): + repo = git.Repo() + repo.config_writer().set_value("commit", "gpgsign", "false").release() + return repo + + +def test_gitbranch_show_current_returns_branch_name(): + with GitTemporaryDirectory(): + repo = _make_repo() + Path("file.txt").write_text("content\n") + repo.git.add("file.txt") + repo.git.commit("-m", "init") + repo.git.checkout("-b", "feature") + + io = InputOutput() + git_repo = GitRepo(io, None, ".") + coder = SimpleNamespace(repo=git_repo, io=io) + + result = git_branch.Tool.execute(coder, show_current=True) + + assert result.strip() == "feature" + + +def test_gitbranch_show_current_handles_detached_head(): + with GitTemporaryDirectory(): + repo = _make_repo() + Path("file.txt").write_text("content\n") + repo.git.add("file.txt") + repo.git.commit("-m", "init") + + commit_sha = repo.head.commit.hexsha + repo.git.checkout(commit_sha) + + io = InputOutput() + git_repo = GitRepo(io, None, ".") + coder = SimpleNamespace(repo=git_repo, io=io) + + result = git_branch.Tool.execute(coder, show_current=True) + + assert result.strip() == "HEAD (detached)" From 93736d65dbd5c7a076f6a77c9eafbb741a396d5f Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 18:31:19 -0500 Subject: [PATCH 24/34] Fix Formatting --- aider/coders/base_coder.py | 7 ++++--- aider/coders/copypaste_coder.py | 9 +++++++-- aider/models.py | 3 ++- tests/coders/test_copypaste_coder.py | 8 ++++---- 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index 
298d74c5930..9c9d5a34f8e 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -229,9 +229,10 @@ async def create( kwargs = use_kwargs from_coder.ok_to_warm_cache = False - if getattr(main_model, "copy_paste_mode", False) and getattr( - main_model, "copy_paste_transport", "api" - ) == "clipboard": + if ( + getattr(main_model, "copy_paste_mode", False) + and getattr(main_model, "copy_paste_transport", "api") == "clipboard" + ): res = coders.CopyPasteCoder(main_model, io, args=args, **kwargs) await res.initialize_mcp_tools() res.original_kwargs = dict(kwargs) diff --git a/aider/coders/copypaste_coder.py b/aider/coders/copypaste_coder.py index 61b332ff81d..f7e4e55337b 100644 --- a/aider/coders/copypaste_coder.py +++ b/aider/coders/copypaste_coder.py @@ -57,7 +57,10 @@ def _init_prompts_from_selected_edit_format(self): target_coder_class = None if coders is not None: for coder_cls in getattr(coders, "__all__", []): - if hasattr(coder_cls, "edit_format") and coder_cls.edit_format == selected_edit_format: + if ( + hasattr(coder_cls, "edit_format") + and coder_cls.edit_format == selected_edit_format + ): target_coder_class = coder_cls break @@ -77,7 +80,9 @@ async def send(self, messages, model=None, functions=None, tools=None): model = model or self.main_model if getattr(model, "copy_paste_transport", "api") == "api": - async for chunk in super().send(messages, model=model, functions=functions, tools=tools): + async for chunk in super().send( + messages, model=model, functions=functions, tools=tools + ): yield chunk return diff --git a/aider/models.py b/aider/models.py index 3e4c1514447..60fb51ea107 100644 --- a/aider/models.py +++ b/aider/models.py @@ -317,7 +317,8 @@ def __init__( weak_model=None, editor_model=None, editor_edit_format=None, - verbose=False, io=None, + verbose=False, + io=None, override_kwargs=None, ): # Determine copy/paste mode and map model aliases diff --git a/tests/coders/test_copypaste_coder.py 
b/tests/coders/test_copypaste_coder.py index 9804e0c1300..ac7b5b90ebc 100644 --- a/tests/coders/test_copypaste_coder.py +++ b/tests/coders/test_copypaste_coder.py @@ -117,9 +117,7 @@ def __init__(self, **kwargs): monkeypatch.setattr("aider.coders.copypaste_coder.litellm.Message", DummyMessage) monkeypatch.setattr("aider.coders.copypaste_coder.litellm.Choices", DummyChoices) - monkeypatch.setattr( - "aider.coders.copypaste_coder.litellm.ModelResponse", DummyModelResponse - ) + monkeypatch.setattr("aider.coders.copypaste_coder.litellm.ModelResponse", DummyModelResponse) class ModelStub: name = "cp:gpt-4o" @@ -154,7 +152,9 @@ def token_count(text): ) expected_hash = hashlib.sha1( - json.dumps({"model": model.name, "messages": messages, "stream": False}, sort_keys=True).encode() + json.dumps( + {"model": model.name, "messages": messages, "stream": False}, sort_keys=True + ).encode() ).hexdigest() assert hash_obj.hexdigest() == expected_hash From 1dfd9403f084245b90f84776395f7fcc46393da6 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 19:19:57 -0500 Subject: [PATCH 25/34] Fix conflicts with model overrides and copy paste coder --- aider/main.py | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/aider/main.py b/aider/main.py index 658def04474..188329532fe 100644 --- a/aider/main.py +++ b/aider/main.py @@ -41,9 +41,9 @@ from aider.coders import Coder from aider.coders.base_coder import UnknownEditFormat from aider.commands import Commands, SwitchCoder -from aider.helpers.copypaste import ClipboardWatcher from aider.deprecated import handle_deprecated_model_args from aider.format_settings import format_settings, scrub_sensitive_info +from aider.helpers.copypaste import ClipboardWatcher from aider.helpers.file_searcher import generate_search_path_list from aider.history import ChatSummary from aider.io import InputOutput @@ -984,31 +984,24 @@ def apply_model_overrides(model_name): """ if not 
model_name: return model_name, {} - entry = override_index.get(model_name) - if not entry: - return model_name, {} - base_model, cfg = entry - return base_model, cfg.copy() + # Check for copy-paste prefix prefix = "" - base_model = model_name if model_name.startswith(models.COPY_PASTE_PREFIX): prefix = models.COPY_PASTE_PREFIX - base_model = model_name[len(prefix) :] + model_name = model_name[len(prefix) :] - if ":" in base_model: - base_model, suffix = base_model.rsplit(":", 1) - else: - suffix = None - - override_kwargs = {} - if suffix and base_model in overrides and suffix in overrides[base_model]: - override_kwargs = overrides[base_model][suffix].copy() - - if prefix: - base_model = prefix + base_model + # Check if the model_name (without prefix) is in override_index + entry = override_index.get(model_name) + if not entry: + # No override found, return original name with prefix + model_name = prefix + model_name + return model_name, {} - return base_model, override_kwargs + base_model, cfg = entry + # Re-add prefix if it was present + model_name = prefix + base_model + return model_name, cfg.copy() # Apply overrides (if any) to the selected models main_model_name, main_model_overrides = apply_model_overrides(args.model) From 152e5d371e65d31cabc935b8fde1d528bbeccd1e Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 19:24:14 -0500 Subject: [PATCH 26/34] Set AgentPrompts as a class level artifact --- aider/coders/agent_coder.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/aider/coders/agent_coder.py b/aider/coders/agent_coder.py index afe877ccca4..5d1d0f4a77a 100644 --- a/aider/coders/agent_coder.py +++ b/aider/coders/agent_coder.py @@ -81,12 +81,9 @@ class AgentCoder(Coder): """Mode where the LLM autonomously manages which files are in context.""" edit_format = "agent" + gpt_prompts = AgentPrompts() def __init__(self, *args, **kwargs): - # Initialize appropriate prompt set before calling parent constructor - # This 
needs to happen before super().__init__ so the parent class has access to gpt_prompts - self.gpt_prompts = AgentPrompts() - # Dictionary to track recently removed files self.recently_removed = {} From f3127a12cb4d7ee519da1349f31fd901f6069afd Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 20:18:31 -0500 Subject: [PATCH 27/34] #166: Format JSON and python literals in error text --- aider/io.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/aider/io.py b/aider/io.py index 80854cfcc6d..2b8a6ed55a7 100644 --- a/aider/io.py +++ b/aider/io.py @@ -1,6 +1,8 @@ +import ast import asyncio import base64 import functools +import json import os import re import shutil @@ -1389,11 +1391,39 @@ def _tool_message(self, message="", strip=True, color=None): message = str(message).encode("ascii", errors="replace").decode("ascii") self.stream_print(message, style=style) + def format_json_in_string(self, text): + if not isinstance(text, str): + return text + + def replace_json(match): + full_match = match.group(0) + try: + # Try to parse as a python literal (e.g. b'{...}') + try: + parsed = ast.literal_eval(full_match) + if isinstance(parsed, bytes): + parsed = parsed.decode("utf-8", errors="ignore") + if isinstance(parsed, str): + data = json.loads(parsed, strict=False) + return "\n" + json.dumps(data, indent=2) + "\n" + except (ValueError, SyntaxError, json.JSONDecodeError): + pass + except Exception: + pass + return full_match + + # Match b'{...}', b"[...]", '{...}', "[...]" + # Handle escaped quotes with (? 
Date: Fri, 19 Dec 2025 20:21:19 -0500 Subject: [PATCH 28/34] #286: Early return instead of uncaught exception --- aider/coders/base_coder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index 9c9d5a34f8e..fc74626e096 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -2837,8 +2837,8 @@ def add_assistant_reply_to_cur_messages(self): # but response.dict() is the Pydantic V1 method name. response_dict = dict(response) except TypeError: - print("Neither model_dump() nor dict() worked as expected.") - raise + print("Response parsing error.") + return msg = response_dict["choices"][0]["message"] From ba42944263bc024441064abfc276179e1498ad95 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 20:23:29 -0500 Subject: [PATCH 29/34] Remove duplicate auto commits --- aider/coders/agent_coder.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/aider/coders/agent_coder.py b/aider/coders/agent_coder.py index b2bcd7c5e7d..e717bed6ccb 100644 --- a/aider/coders/agent_coder.py +++ b/aider/coders/agent_coder.py @@ -1199,9 +1199,6 @@ async def reply_completed(self): ) = await self._process_tool_commands(content) if self.agent_finished: - if len(self.files_edited_by_tools): - await self.auto_commit(self.files_edited_by_tools) - self.tool_usage_history = [] if self.files_edited_by_tools: _ = await self.auto_commit(self.files_edited_by_tools) From 8803f20572c2b03bb2b1d96eaa0cdf87cd6644d5 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 20:37:48 -0500 Subject: [PATCH 30/34] Make CONTRIBUTING.md a bit more strongly worded --- CONTRIBUTING.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 281771ceca6..65b9562bbf5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -64,11 +64,13 @@ docker build -t cecli -f docker/Dockerfile . 
## Coding Standards -It really helps the merge process if your PR: +In order for your PR to be accepted it must: -1. complies with project coding standards -2. includes test coverage -3. updates the relevant user-facing documentation, including the output of `/help` and `--help` as well as notes in config files and the web-site. +1. Comply with project coding standards (including running the pre-commit formatting hooks) +2. Include test coverage +3. Update relevant user-facing documentation: + - Primary documentation will live in `aider/website/docs/config/` + - Check new cli arguments with the output of `/help` and `--help` ### Python Compatibility From 644d5916028eee80e987a69ac8c1c557391f9f01 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 20:40:28 -0500 Subject: [PATCH 31/34] More CONTRIBUTING.md verbiage --- CONTRIBUTING.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 65b9562bbf5..dfff1d7d146 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -66,9 +66,10 @@ docker build -t cecli -f docker/Dockerfile . In order for your PR to be accepted it must: -1. Comply with project coding standards (including running the pre-commit formatting hooks) -2. Include test coverage -3. Update relevant user-facing documentation: +1. Be up to date with the main branch +2. Comply with project coding standards (including running the pre-commit formatting hooks) +3. Include test coverage +4. 
Update relevant user-facing documentation: - Primary documentation will live in `aider/website/docs/config/` - Check new cli arguments with the output of `/help` and `--help` From 8bd4550ca9dec0a9b289790c9fd550e71787c1cb Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 22:59:18 -0500 Subject: [PATCH 32/34] Replace networkx with rustworkx for performance improvement --- aider/main.py | 1 - aider/repomap.py | 67 +++++++++++++++++++++-------- requirements.txt | 9 ++-- requirements/common-constraints.txt | 4 +- requirements/requirements.in | 17 ++------ 5 files changed, 62 insertions(+), 36 deletions(-) diff --git a/aider/main.py b/aider/main.py index 188329532fe..8d59e6c9b9e 100644 --- a/aider/main.py +++ b/aider/main.py @@ -1554,7 +1554,6 @@ def load_slow_imports(swallow=True): try: import httpx  # noqa: F401 import litellm  # noqa: F401 - import networkx  # noqa: F401 import numpy  # noqa: F401 except Exception as e: if not swallow: diff --git a/aider/repomap.py b/aider/repomap.py index e140a5468c6..63a596eade5 100644 --- a/aider/repomap.py +++ b/aider/repomap.py @@ -599,7 +599,7 @@ def get_tags_raw(self, fname, rel_fname): def get_ranked_tags( self, chat_fnames, other_fnames, mentioned_fnames, mentioned_idents, progress=True ): - import networkx as nx + import rustworkx defines = defaultdict(set) references = defaultdict(lambda: defaultdict(int)) @@ -717,7 +717,24 @@ def get_ranked_tags( idents = set(defines.keys()).intersection(set(references.keys())) - G = nx.MultiDiGraph() + G = rustworkx.PyDiGraph(multigraph=True) + + # Collect all unique file names that will be nodes + all_files = set() + for files in defines.values(): + all_files.update(files) + for ref_dict in references.values(): + all_files.update(ref_dict.keys()) + all_files.update(file_imports.keys()) + all_files.update(personalization.keys()) + + # Add all nodes and create mapping from file name to node index + file_to_node = {} + node_to_file = {} + for fname in sorted(all_files): 
node_idx = G.add_node(fname) + file_to_node[fname] = node_idx + node_to_file[node_idx] = fname # Add a small self-edge for every definition that has no references # Helps with tree-sitter 0.23.2 with ruby, where "def greet(name)" @@ -728,7 +745,10 @@ def get_ranked_tags( if ident in references: continue for definer in defines[ident]: - G.add_edge(definer, definer, weight=unreferenced_weight, ident=ident) + definer_idx = file_to_node[definer] + G.add_edge( + definer_idx, definer_idx, {"weight": unreferenced_weight, "ident": ident} + ) for ident in idents: if progress: @@ -819,42 +839,54 @@ def get_ranked_tags( path_distance = len(p1) + len(p2) - (2 * common_count) weight = use_mul * 2 ** (-1 * path_distance) - G.add_edge(referencer, definer, weight=weight, key=ident, ident=ident) + referencer_idx = file_to_node[referencer] + definer_idx = file_to_node[definer] + G.add_edge( + referencer_idx, + definer_idx, + {"weight": weight, "key": ident, "ident": ident}, + ) self.io.profile("Build Graph") - if not references: - pass - + self.io.profile("PERSONALIZATION START") + # Convert personalization from file names to node indices if personalization: - pers_args = dict(personalization=personalization, dangling=personalization) + pers_node = {file_to_node[fname]: val for fname, val in personalization.items()} + pers_args = dict(personalization=pers_node, dangling=pers_node) else: pers_args = dict() - + self.io.profile("PERSONALIZATION END") try: - ranked = nx.pagerank(G, weight="weight", **pers_args) + ranked = rustworkx.pagerank(G, weight_fn=lambda edge: edge["weight"], **pers_args) except ZeroDivisionError: # Issue #1536 try: - ranked = nx.pagerank(G, weight="weight") + ranked = rustworkx.pagerank(G, weight_fn=lambda edge: edge["weight"]) except ZeroDivisionError: + self.io.profile("zero") return [] + except Exception as e: + self.io.profile(e) + except Exception as e: + self.io.profile(e) self.io.profile("PageRank") # distribute the rank from each source node, across all 
of its out edges ranked_definitions = defaultdict(float) - for src in G.nodes: + for src in G.node_indices(): if progress: self.io.update_spinner(f"{UPDATING_REPO_MAP_MESSAGE}: {src}") src_rank = ranked[src] - total_weight = sum(data["weight"] for _src, _dst, data in G.out_edges(src, data=True)) + total_weight = sum(data["weight"] for _src, _dst, data in G.out_edges(src)) # dump(src, src_rank, total_weight) - for _src, dst, data in G.out_edges(src, data=True): + for _src, dst, data in G.out_edges(src): data["rank"] = src_rank * data["weight"] / total_weight ident = data["ident"] - ranked_definitions[(dst, ident)] += data["rank"] + fname = node_to_file[dst] + ranked_definitions[(fname, ident)] += data["rank"] self.io.profile("Distribute Rank") @@ -878,8 +910,9 @@ def get_ranked_tags( fnames_already_included = set(rt[0] for rt in ranked_tags) - top_rank = sorted([(rank, node) for (node, rank) in ranked.items()], reverse=True) - for rank, fname in top_rank: + top_rank = sorted([(rank, node_idx) for (node_idx, rank) in ranked.items()], reverse=True) + for rank, node_idx in top_rank: + fname = node_to_file[node_idx] if fname in rel_other_fnames_without_tags: rel_other_fnames_without_tags.remove(fname) if fname not in fnames_already_included: diff --git a/requirements.txt b/requirements.txt index 75b3f015291..e727e668c4d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -274,13 +274,10 @@ multidict==6.7.0 # -c requirements/common-constraints.txt # aiohttp # yarl -networkx==3.6 - # via - # -c requirements/common-constraints.txt - # -r requirements/requirements.in numpy==2.3.5 # via # -c requirements/common-constraints.txt + # rustworkx # scipy # soundfile openai==2.8.1 @@ -449,6 +446,10 @@ rsa==4.9.1 # via # -c requirements/common-constraints.txt # google-auth +rustworkx==0.17.1 + # via + # -c requirements/common-constraints.txt + # -r requirements/requirements.in scipy==1.16.3 # via # -c requirements/common-constraints.txt diff --git 
a/requirements/common-constraints.txt b/requirements/common-constraints.txt index 900a557cee1..0c5657a8a4a 100644 --- a/requirements/common-constraints.txt +++ b/requirements/common-constraints.txt @@ -278,7 +278,6 @@ nest-asyncio==1.6.0 # via llama-index-core networkx==3.6 # via - # -r requirements/requirements.in # llama-index-core # torch nltk==3.9.2 @@ -292,6 +291,7 @@ numpy==2.3.5 # llama-index-core # matplotlib # pandas + # rustworkx # scikit-learn # scipy # soundfile @@ -508,6 +508,8 @@ rpds-py==0.29.0 # referencing rsa==4.9.1 # via google-auth +rustworkx==0.17.1 + # via -r requirements/requirements.in safetensors==0.7.0 # via transformers scikit-learn==1.7.2 diff --git a/requirements/requirements.in b/requirements/requirements.in index d5195b87d9c..9dd02eb0265 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -15,7 +15,7 @@ beautifulsoup4>=4.13.4 PyYAML>=6.0.2 diff-match-patch>=20241021 pypandoc>=1.15 -litellm>=1.75.0 +litellm>=1.80.0 flake8>=7.3.0 importlib_resources pyperclip>=1.9.0 @@ -32,19 +32,10 @@ mcp>=1.12.3 textual>=6.0.0 truststore -# The proper dependency is networkx[default], but this brings -# in matplotlib and a bunch of other deps -# https://github.com/networkx/networkx/blob/d7132daa8588f653eacac7a5bae1ee85a183fa43/pyproject.toml#L57 -# We really only need networkx itself and scipy for the repomap. -# -# >3.5 seems to not be available for py3.10 -networkx>=3.4.2 +# Replaced networkx with rustworkx for better performance in repomap +rustworkx>=0.15.0 -# This is the one networkx dependency that we need. -# Including it here explicitly because we -# didn't specify networkx[default] above. 
-# -# 1.16 onwards only supports python3.11+ +# scipy is still needed for other parts of the codebase scipy>=1.15.3 # GitHub Release action failing on "KeyError: 'home-page'" From de79fc1544ce54a98914125639a92439ae689d85 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 23:02:45 -0500 Subject: [PATCH 33/34] Update agent prompt to encourage the model to act more proactively and yap a bit less --- aider/coders/agent_prompts.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/aider/coders/agent_prompts.py b/aider/coders/agent_prompts.py index 7d0bf390d48..0aa2bb771f6 100644 --- a/aider/coders/agent_prompts.py +++ b/aider/coders/agent_prompts.py @@ -17,9 +17,8 @@ class AgentPrompts(CoderPrompts): ## Core Directives - **Role**: Act as an expert software engineer. - **Act Proactively**: Autonomously use file discovery and context management tools (`ViewFilesAtGlob`, `ViewFilesMatching`, `Ls`, `View`, `Remove`) to gather information and fulfill the user's request. Chain tool calls across multiple turns to continue exploration. -- **Be Decisive**: Do not ask the same question or search for the same term in multiple ways. Trust that your initial findings are valid. -- **Be Concise**: Keep all responses brief and direct (1-3 sentences). Avoid preamble, postamble, and unnecessary explanations. -- **Confirm Ambiguity**: Before applying complex or ambiguous edits, briefly state your plan. For simple, direct edits, proceed without confirmation. +- **Be Decisive**: Trust that your initial findings are valid. Refrain from asking the same question or searching for the same term in multiple similar ways. +- **Be Concise**: Keep all responses brief and direct (1-3 sentences). Avoid preamble, postamble, and unnecessary explanations. Do not repeat yourself. 
From ce25a037618ad556f85671695cff2df2d05417f2 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 23:39:13 -0500 Subject: [PATCH 34/34] Options Updates: - system more stable; check for updates on launch again - --tui mode is default if linear-output is off and it's not explicitly set false --- aider/args.py | 12 +++++------- aider/main.py | 3 ++- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/aider/args.py b/aider/args.py index 43240ee55bd..1453cca90a1 100644 --- a/aider/args.py +++ b/aider/args.py @@ -246,8 +246,8 @@ def get_parser(default_config_files, git_root): group = parser.add_argument_group("TUI Settings") group.add_argument( "--tui", - action="store_true", - default=False, + action=argparse.BooleanOptionalAction, + default=None, help="Launch Textual TUI interface (experimental)", ) group.add_argument( @@ -718,7 +718,7 @@ def get_parser(default_config_files, git_root): "--check-update", action=argparse.BooleanOptionalAction, help="Check for new aider versions on launch", - default=False, + default=True, ) group.add_argument( "--show-release-notes", @@ -803,10 +803,8 @@ def get_parser(default_config_files, git_root): ) group.add_argument( "--linear-output", - action="store_true", - help=( - "Run input and output sequentially instead of us simultaneous streams (default: False)" - ), + action=argparse.BooleanOptionalAction, + help="Run input and output sequentially instead of using simultaneous streams (default: True)", default=True, ) group.add_argument( diff --git a/aider/main.py b/aider/main.py index 8d59e6c9b9e..a8a2209efec 100644 --- a/aider/main.py +++ b/aider/main.py @@ -762,10 +762,11 @@ def get_io(pretty): # TUI mode - create TUI-specific IO output_queue = None input_queue = None - if args.tui: + if args.tui or (args.tui is None and not args.linear_output): try: from aider.tui import create_tui_io + args.tui = True args.linear_output = True print("Starting aider TUI...", flush=True) io, output_queue, input_queue = 
create_tui_io(args, editing_mode)