hackerman.pyx: excerpt from the editor's Cython-compiled HackermanConfig lexer (shown as the page's background code)

def lexer_name(self): return "HackermanConfig"

@property
def comment_char(self): return "--"

def tokenize(self, str text):
    cdef int current_char_index = 0
    cdef str current_char
    # ...

Launching for macOS in 2025

Hackerman Text, the AI-native code editor

Buy once, own forever

Free to use for personal or evaluation purposes.

Waitlist

Features

Hackerman Text is built on a simple principle: make typing feel instant, then stay out of your way.

Non-essential features are on-demand by default. There are no distracting squiggles.

Most things behave exactly as you expect, with minimal side effects. Each tab holds a single file, and any tab can be split once. You can keep many tabs open in a window, and many windows open at once. Every view stays in sync and always shows the latest version of your files.

Your configuration lives in plain text, so it can be version-controlled, shared across machines, or stored in the cloud.
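
For illustration only, a snippet of such a settings file might follow the same shape as the model bindings shown further down: a -- comment, a name, and a value. The option names below are invented for this example and are not documented Hackerman Text settings.

-- illustrative only; these option names are made up
-- color theme
theme
gruvbox-dark

-- editor font
font
JetBrains Mono, 14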

Local-first AI (opt-in)

Zero-latency typing (<10ms)

No telemetry, no sign-in

No subscription required

Download links will appear here when available.

Simple and fast

Hackerman Text ships with built-in, native-level lexers tuned for speed. There are currently no plans to add heavier, language-aware tooling.

Inline AI chat

Edit or delete context as plain text, or use Markdown for extra structure. Chats can be saved to disk, version controlled, and resumed later in any editor.

Download Ollama

Custom bindable functions

Write your own editor commands in a small, well-defined subset of Python. Functions hot-reload from your scripts file, show up in the function explorer, and can be bound to any key, so your custom workflows feel like built-in features.
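
As a rough sketch of what a scripts-file function might look like, here is a command that toggles -- line comments on a selection. The text-in, text-out shape (a plain string in, a plain string out) is an assumption made for illustration; the actual bindable-function API is not documented on this page.

# scripts file: illustrative only. How Hackerman Text passes the selection in
# and out is an assumption here, not the documented API.

def toggle_line_comments(selection: str, comment_char: str = "--") -> str:
    """Comment every line of the selection, or uncomment if all are commented."""
    lines = selection.splitlines()
    commented = all(
        line.lstrip().startswith(comment_char) for line in lines if line.strip()
    )
    result = []
    for line in lines:
        if not line.strip():
            result.append(line)                       # leave blank lines alone
        elif commented:
            indent = len(line) - len(line.lstrip())
            body = line.lstrip()[len(comment_char):].lstrip()
            result.append(" " * indent + body)        # strip the comment prefix
        else:
            result.append(comment_char + " " + line)  # prepend the comment prefix
    return "\n".join(result)

Bound to a key, this behaves like a built-in comment toggle; because it is plain Python, it can also be tested outside the editor and hot-reloads from the scripts file as described above.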

Editor roadmap

// done

Core editor features (multi-cursor, multiple selections)

Everything as text files

Multiple views, single document

Create your own themes

Local LLMs

Copilot-like code completion

Project-wide search

Keybind custom functions

Emacs-like org mode for notes

Zero-latency typing (<10ms avg.)

// almost ready...

Bring your own lexer

Support for mix-and-match LLMs (local and hosted)

Inline shell, eval, and AI chat

Code scroller (minimap)

// todo

Context-aware autocomplete (AI-based)

Remote development (SSH)

Virtual buffers for very large files (10M+ lines)

Sublime-like performance

Model support

Local models are supported via Ollama, with out-of-the-box integration for OpenAI and Mistral and more providers planned over time. Each binding below is ordinary HackermanConfig: a -- comment describing the feature, the feature name, then a provider and model; additional -- lines list commented-out alternatives.

-- code completion (fill-in-the-middle)
code_completion
ollama, qwen2.5-coder:1.5b
-- ollama, qwen2.5-coder:7b
-- mistral, codestral, ...

-- code instruction (selection as prompt)
code_instruction
openai, gpt-5.1-chat-latest, ...

-- inline chat
chat
openai, gpt-5.1-chat-latest, ...

Programming language support

Assembly

Bash

C

C++

C#

CSS

Cython

D

Dart

Fortran

Go

Haskell

HTML

JavaScript

Kotlin

LaTeX

Lisp

Lua

Makefile

Markdown

Mojo

Nim

OCaml

Odin

Pascal

Perl

PHP

PowerShell

Prolog

Python

R

Ruby

Rust

Scala

Swift

TypeScript

Verilog

Zig

Jai

Turn your late-night ideas into morning commits.

Hackerman Text

Launching for macOS in 2025