@Dimfred
Created February 12, 2026 19:47
Claude Code in zsh for quick command lookup (shellm: shell + LLM, you get it?!)
#!/usr/bin/env zsh
# shellm - ZSH plugin that intercepts @llm prompts and sends them to Claude
# Usage: put @llm anywhere in your command line, e.g.:
#   @llm list files by size
#   find . @llm only python files modified today
# The response (a shell command) replaces the entire line for you to review/execute.
SHELLM_MODEL="${SHELLM_MODEL:-haiku}"
_shellm_system_prompt='You are a shell command generator. The user describes what they want to do and you respond with ONLY the shell command, nothing else. No explanation, no markdown, no code fences. Just the raw command. The user is running zsh on Linux (Arch).'
_shellm_handle() {
    local buffer="$1"

    # check if @llm appears anywhere in the line
    if [[ "$buffer" != *@llm* ]]; then
        return 1
    fi

    # remove @llm from the line; the rest is the prompt
    local prompt="${buffer//@llm/}"
    # strip leading/trailing whitespace so an all-whitespace prompt is caught below
    prompt="${prompt#"${prompt%%[![:space:]]*}"}"
    prompt="${prompt%"${prompt##*[![:space:]]}"}"
    if [[ -z "$prompt" ]]; then
        zle -M "shellm: empty prompt"
        return 0
    fi

    zle -M "shellm: thinking..."
    local sys_prompt="${_shellm_system_prompt} Current directory: ${PWD}"
    local cmd
    cmd=$(claude --print --model "$SHELLM_MODEL" --system-prompt "$sys_prompt" "$prompt" 2>&1)
    if [[ $? -ne 0 ]]; then
        zle -M "shellm error: ${cmd}"
        return 0
    fi

    # push original buffer to undo stack so esc+u restores it
    zle split-undo
    BUFFER="$cmd"
    CURSOR=${#BUFFER}
    zle -M ""
    return 0
}
_shellm_accept_line() {
    if _shellm_handle "$BUFFER"; then
        # handled: show the generated command without executing it
        zle redisplay
    else
        # no @llm in the line: fall through to the builtin accept-line
        zle .accept-line
    fi
}
zle -N accept-line _shellm_accept_line
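
To try it, the file just needs to be sourced in an interactive shell so the accept-line wrapper is registered. A minimal setup sketch, assuming the gist is saved as ~/.zsh/shellm.zsh (the path and the sonnet override are illustrative, not part of the gist):

# in ~/.zshrc
SHELLM_MODEL=sonnet           # optional: override the default "haiku"
source ~/.zsh/shellm.zsh      # binds accept-line to the _shellm_accept_line wrapper

# at the prompt, type a request and press Enter:
#   @llm show the 10 largest files here
# the line is replaced with a shell command for review; press Enter again to run it,
# or use undo to get the original line back (split-undo keeps it on the undo stack).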