initial commit
This commit is contained in:
458
rustbpe/Cargo.lock
generated
Normal file
458
rustbpe/Cargo.lock
generated
Normal file
@@ -0,0 +1,458 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
||||
|
||||
[[package]]
|
||||
name = "bit-set"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
|
||||
dependencies = [
|
||||
"bit-vec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bit-vec"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
|
||||
|
||||
[[package]]
|
||||
name = "castaway"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
|
||||
dependencies = [
|
||||
"rustversion",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
|
||||
|
||||
[[package]]
|
||||
name = "compact_str"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a"
|
||||
dependencies = [
|
||||
"castaway",
|
||||
"cfg-if",
|
||||
"itoa",
|
||||
"rustversion",
|
||||
"ryu",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
|
||||
dependencies = [
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-epoch"
|
||||
version = "0.9.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
|
||||
|
||||
[[package]]
|
||||
name = "dary_heap"
|
||||
version = "0.3.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
|
||||
|
||||
[[package]]
|
||||
name = "fancy-regex"
|
||||
version = "0.16.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf04c5ec15464ace8355a7b440a33aece288993475556d461154d7a62ad9947c"
|
||||
dependencies = [
|
||||
"bit-set",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indoc"
|
||||
version = "2.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.175"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.101"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3"
|
||||
version = "0.23.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"indoc",
|
||||
"libc",
|
||||
"memoffset",
|
||||
"once_cell",
|
||||
"portable-atomic",
|
||||
"pyo3-build-config",
|
||||
"pyo3-ffi",
|
||||
"pyo3-macros",
|
||||
"unindent",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-build-config"
|
||||
version = "0.23.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"target-lexicon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-ffi"
|
||||
version = "0.23.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"pyo3-build-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-log"
|
||||
version = "0.12.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
|
||||
dependencies = [
|
||||
"arc-swap",
|
||||
"log",
|
||||
"pyo3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros"
|
||||
version = "0.23.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"pyo3-macros-backend",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros-backend"
|
||||
version = "0.23.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"pyo3-build-config",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "r-efi"
|
||||
version = "5.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
|
||||
|
||||
[[package]]
|
||||
name = "rustbpe"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"compact_str",
|
||||
"dary_heap",
|
||||
"fancy-regex",
|
||||
"indexmap",
|
||||
"log",
|
||||
"pyo3",
|
||||
"pyo3-log",
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "static_assertions"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "target-lexicon"
|
||||
version = "0.12.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
||||
|
||||
[[package]]
|
||||
name = "unindent"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.14.4+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88a5f4a424faf49c3c2c344f166f0662341d470ea185e939657aaff130f0ec4a"
|
||||
dependencies = [
|
||||
"wit-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wit-bindgen"
|
||||
version = "0.45.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36"
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.8.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.8.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
15
rustbpe/Cargo.toml
Normal file
15
rustbpe/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
name = "rustbpe"
version = "0.1.0"
edition = "2024"

[dependencies]
# d-ary (8-ary) heap used as the merge priority queue during training
dary_heap = "0.3"
# NOTE(review): indexmap is not referenced in src/lib.rs — possibly unused; confirm before removing
indexmap = "2.2"
# regex engine supporting the possessive quantifiers used by the GPT-4 split pattern
fancy-regex = "0.16.1"
log = "0.4.28"
# Python bindings; "extension-module" builds a Python-importable shared library
pyo3 = { version = "0.23.3", features = ["extension-module"] }
# forwards Rust `log` records to Python's `logging`
pyo3-log = "0.12.4"
# faster non-cryptographic hasher for the hot HashMaps/HashSets
ahash = "0.8.12"
# data-parallel iterators for ingestion and pair counting
rayon = "1.11.0"
# small-string optimization for the short text chunks being counted
compact_str = "0.9.0"
|
||||
5
rustbpe/README.md
Normal file
5
rustbpe/README.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# rustbpe
|
||||
|
||||
> The missing tiktoken training code
|
||||
|
||||
A very lightweight Rust library for training a GPT tokenizer. The issue is that the inference library [tiktoken](https://github.com/openai/tiktoken) is great, but only does inference. Separately, the huggingface [tokenizers](https://github.com/huggingface/tokenizers) library does training, but it is rather bloated and really hard to navigate because it has to support all the different historical baggage of how people dealt with tokenizers over the years. More recently, I also wrote the [minbpe](https://github.com/karpathy/minbpe) library which does both training and inference, but only in inefficient Python. Basically what I really want is a non-fancy, super simple, but still relatively efficient training code for GPT tokenizer (more efficient than minbpe, much cleaner/simpler than tokenizers), and then export the trained vocab for inference with tiktoken. Does that make sense? So here we are. There are more opportunities for optimization here, I just stopped a bit early because unlike minbpe before it, rustbpe is now simple and fast enough, and not a significant bottleneck for nanochat.
|
||||
476
rustbpe/src/lib.rs
Normal file
476
rustbpe/src/lib.rs
Normal file
@@ -0,0 +1,476 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap as StdHashMap;
|
||||
|
||||
use dary_heap::OctonaryHeap;
|
||||
use fancy_regex::Regex;
|
||||
use pyo3::prelude::*;
|
||||
|
||||
use ahash::{AHashMap, AHashSet};
|
||||
use compact_str::CompactString;
|
||||
use rayon::prelude::*;
|
||||
|
||||
// Default GPT-4 style regex pattern for splitting text
|
||||
const GPT4_PATTERN: &str = r"'(?i:[sdmt]|ll|ve|re)|[^\r\n\p{L}\p{N}]?+\p{L}+|\p{N}{1,3}| ?[^\s\p{L}\p{N}]++[\r\n]*|\s*[\r\n]|\s+(?!\S)|\s+";
|
||||
|
||||
type Pair = (u32, u32);
|
||||
|
||||
/// A Byte Pair Encoding tokenizer that matches the GPT-4 style implementation:
/// text is pre-split with a regex, then byte-level pairs are merged greedily.
#[pyclass]
pub struct Tokenizer {
    /// Maps pairs of token IDs to their merged token ID
    pub merges: StdHashMap<Pair, u32>,
    /// The regex pattern used for text splitting
    pub pattern: String,
    /// Compiled regex for efficiency (kept in sync with `pattern`)
    compiled_pattern: Regex,
}
|
||||
|
||||
// ------------------------ internal helpers ------------------------
|
||||
|
||||
/// One pre-tokenized chunk of text, held as a sequence of token ids.
/// Ids start out as raw byte values (0..256) and the sequence shrinks
/// as merges are applied during training.
#[derive(Clone, Debug)]
struct Word {
    // Current token ids for this chunk, after any merges applied so far.
    ids: Vec<u32>,
}

impl Word {
    #[inline]
    fn new(ids: Vec<u32>) -> Self {
        Self { ids }
    }

    /// Iterate over all adjacent id pairs in this word.
    #[inline]
    fn pairs<'a>(&'a self) -> impl Iterator<Item = Pair> + 'a {
        self.ids.windows(2).map(|w| (w[0], w[1]))
    }

    /// Merge all non-overlapping occurrences of pair -> new_id.
    /// Returns a small Vec of local pair-count deltas for THIS word only:
    /// -1 for removed pairs, +1 for newly created pairs.
    ///
    /// NOTE: this version deliberately avoids a HashMap in the hot loop.
    fn merge_pair(&mut self, pair: Pair, new_id: u32) -> Vec<(Pair, i32)> {
        let (a, b) = pair;
        let n = self.ids.len();
        if n < 2 {
            // Nothing to merge in a 0- or 1-token word.
            return Vec::new();
        }

        let mut out: Vec<u32> = Vec::with_capacity(n);
        // Deltas are emitted unaggregated (duplicates allowed); the caller
        // sums them into the global pair-count map, so order doesn't matter.
        let mut deltas: Vec<(Pair, i32)> = Vec::with_capacity(6);

        let mut i = 0;
        while i < n {
            if i + 1 < n && self.ids[i] == a && self.ids[i + 1] == b {
                // Neighbors of the occurrence being merged: `left` is the last
                // already-written token (so overlapping occurrences see the
                // merged id, not the original), `right` is the next unread one.
                let left = out.last().copied();
                let right = if i + 2 < n { Some(self.ids[i + 2]) } else { None };

                // remove old pairs
                if let Some(x) = left {
                    deltas.push(((x, a), -1));
                    deltas.push(((x, new_id), 1));
                }
                deltas.push(((a, b), -1));
                if let Some(y) = right {
                    deltas.push(((b, y), -1));
                    deltas.push(((new_id, y), 1));
                }

                // write merged token
                out.push(new_id);
                i += 2; // skip 'a' and 'b'
            } else {
                out.push(self.ids[i]);
                i += 1;
            }
        }

        self.ids = out;
        deltas
    }
}
|
||||
|
||||
#[derive(Debug, Eq)]
|
||||
struct MergeJob {
|
||||
pair: Pair,
|
||||
count: u64,
|
||||
/// set of word indices where this pair may occur and needs processing
|
||||
pos: AHashSet<usize>,
|
||||
}
|
||||
|
||||
impl PartialEq for MergeJob {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.count == other.count && self.pair == other.pair
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for MergeJob {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for MergeJob {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
// Max-heap by count; tie-break to ascending pair order (deterministic)
|
||||
if self.count != other.count {
|
||||
self.count.cmp(&other.count)
|
||||
} else {
|
||||
// ascending order on the pair when counts tie
|
||||
other.pair.cmp(&self.pair)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn count_pairs_parallel(
|
||||
words: &[Word],
|
||||
counts: &[i32],
|
||||
) -> (AHashMap<Pair, i32>, AHashMap<Pair, AHashSet<usize>>) {
|
||||
words
|
||||
.par_iter()
|
||||
.enumerate()
|
||||
.map(|(i, w)| {
|
||||
let mut local_pc: AHashMap<Pair, i32> = AHashMap::new();
|
||||
let mut local_wtu: AHashMap<Pair, AHashSet<usize>> = AHashMap::new();
|
||||
if w.ids.len() >= 2 && counts[i] != 0 {
|
||||
for (a, b) in w.pairs() {
|
||||
*local_pc.entry((a, b)).or_default() += counts[i];
|
||||
local_wtu.entry((a, b)).or_default().insert(i);
|
||||
}
|
||||
}
|
||||
(local_pc, local_wtu)
|
||||
})
|
||||
.reduce(
|
||||
|| (AHashMap::new(), AHashMap::new()),
|
||||
|(mut acc_pc, mut acc_wtu), (pc, wtu)| {
|
||||
for (k, v) in pc {
|
||||
*acc_pc.entry(k).or_default() += v;
|
||||
}
|
||||
for (k, s) in wtu {
|
||||
acc_wtu.entry(k).or_default().extend(s);
|
||||
}
|
||||
(acc_pc, acc_wtu)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// ------------------------ END helpers ------------------------
|
||||
|
||||
impl Tokenizer {

    /// Core incremental BPE training given unique words and their counts.
    /// `words`: one entry per unique chunk (Vec<u32> of token-ids/bytes).
    /// `counts`: same length as `words`, count per chunk.
    ///
    /// Strategy: maintain an authoritative pair -> count map plus a max-heap
    /// of candidate merges. Heap entries are allowed to go stale; staleness is
    /// detected on pop by comparing against `pair_counts` ("lazy refresh"),
    /// which avoids a decrease-key operation.
    fn train_core_incremental(&mut self, mut words: Vec<Word>, counts: Vec<i32>, vocab_size: u32) {
        assert!(vocab_size >= 256, "vocab_size must be at least 256");
        // Ids 0..256 are reserved for raw bytes; everything above is a merge.
        let num_merges = vocab_size - 256;
        log::info!("Starting BPE training: {} merges to compute", num_merges);
        self.merges.clear();

        // ---- Initial pair_counts and where_to_update (parallel) ----
        log::info!("Computing initial pair counts from {} unique sequences", words.len());
        let (mut pair_counts, mut where_to_update) = count_pairs_parallel(&words, &counts);

        // ---- Build heap ----
        log::info!("Building heap with {} unique pairs", pair_counts.len());
        let mut heap = OctonaryHeap::with_capacity(pair_counts.len());
        for (pair, pos) in where_to_update.drain() {
            let c = *pair_counts.get(&pair).unwrap_or(&0);
            if c > 0 {
                heap.push(MergeJob {
                    pair,
                    count: c as u64,
                    pos,
                });
            }
        }

        // ---- Merge loop ----
        log::info!("Starting merge loop");
        let mut merges_done = 0u32;
        let mut last_log_percent = 0u32;

        while merges_done < num_merges {
            let Some(mut top) = heap.pop() else { break; };

            // Lazy refresh: if this entry's count is stale, update it and
            // push it back rather than processing it now.
            let current = *pair_counts.get(&top.pair).unwrap_or(&0);
            if top.count != current as u64 {
                top.count = current as u64;
                if top.count > 0 {
                    heap.push(top);
                }
                continue;
            }
            if top.count == 0 {
                // The best remaining candidate no longer occurs anywhere.
                break;
            }

            // Record merge
            let new_id = 256 + merges_done;
            self.merges.insert(top.pair, new_id);

            // Merge this pair in all words where it occurs
            let mut local_pos_updates: AHashMap<Pair, AHashSet<usize>> = AHashMap::new();
            for &word_idx in &top.pos {
                // Apply merge to this word and collect pair-count deltas
                let changes = words[word_idx].merge_pair(top.pair, new_id);
                // Update global pair counts based on this word's count
                for (pair, delta) in changes {
                    let delta_total = delta * counts[word_idx];
                    if delta_total != 0 {
                        *pair_counts.entry(pair).or_default() += delta_total;
                        if delta > 0 {
                            // Newly created pair: remember this word so a
                            // future merge of that pair knows where to look.
                            local_pos_updates.entry(pair).or_default().insert(word_idx);
                        }
                    }
                }
            }

            // Add the updated pair counts back to the heap
            for (pair, pos) in local_pos_updates {
                let cnt = *pair_counts.get(&pair).unwrap_or(&0);
                if cnt > 0 {
                    heap.push(MergeJob {
                        pair,
                        count: cnt as u64,
                        pos,
                    });
                }
            }

            merges_done += 1;

            // Log progress every 1%
            // (no div-by-zero risk: the loop body is unreachable when num_merges == 0)
            let current_percent = (merges_done * 100) / num_merges;
            if current_percent > last_log_percent {
                log::info!(
                    "Progress: {}% ({}/{} merges) - Last merge: {:?} -> {} (frequency: {})",
                    current_percent, merges_done, num_merges, top.pair, new_id, top.count
                );
                last_log_percent = current_percent;
            }
        }

        log::info!("Finished training: {} merges completed", merges_done);
    }
}
|
||||
|
||||
/// Public methods for the Tokenizer class that will be exposed to Python.
|
||||
#[pymethods]
|
||||
impl Tokenizer {
|
||||
/// Create a new Tokenizer
|
||||
#[new]
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
merges: StdHashMap::new(),
|
||||
pattern: String::new(),
|
||||
compiled_pattern: Regex::new("").expect("Empty regex should be valid"),
|
||||
}
|
||||
}
|
||||
|
||||
    /// Train from a streaming iterator (parallel ingestion).
    /// We refill a Rust Vec<String> buffer under the GIL, then release the GIL
    /// to do the heavy splitting and counting **in parallel** with rayon.
    #[pyo3(signature = (iterator, vocab_size, buffer_size=8192, pattern=None))]
    #[pyo3(text_signature = "(self, iterator, vocab_size, buffer_size=8192, pattern=None)")]
    pub fn train_from_iterator(
        &mut self,
        py: pyo3::Python<'_>,
        iterator: &pyo3::Bound<'_, pyo3::PyAny>,
        vocab_size: u32,
        buffer_size: usize,
        pattern: Option<String>,
    ) -> PyResult<()> {
        // Use provided pattern or default to GPT-4 pattern
        let pattern_str = pattern.unwrap_or_else(|| GPT4_PATTERN.to_string());

        // Update the stored pattern and compile it
        self.pattern = pattern_str.clone();
        self.compiled_pattern = Regex::new(&pattern_str)
            .map_err(|e| pyo3::exceptions::PyValueError::new_err(format!("Invalid regex pattern: {}", e)))?;

        // Prepare a true Python iterator object
        // SAFETY: PyObject_GetIter is called with the GIL held (`py` is in
        // scope) on a valid object pointer; a NULL result is converted into
        // the pending Python exception by from_borrowed_ptr_or_err.
        // NOTE(review): PyObject_GetIter returns a NEW reference, so treating
        // it as borrowed appears to leak one reference to the iterator —
        // confirm whether from_owned_ptr_or_err was intended here.
        let py_iter: pyo3::Py<pyo3::PyAny> = unsafe {
            pyo3::Bound::from_borrowed_ptr_or_err(py, pyo3::ffi::PyObject_GetIter(iterator.as_ptr()))?
                .into()
        };

        // Global chunk counts (chunk text -> number of occurrences)
        let mut counts: AHashMap<CompactString, i32> = AHashMap::new();

        // Temporary buffer we refill under the GIL
        let mut buf: Vec<String> = Vec::with_capacity(buffer_size);

        log::info!("Processing sequences from iterator (buffer_size: {})", buffer_size);
        let mut total_sequences = 0u64;

        // Helper: refill `buf` with up to `buffer_size` strings from the Python iterator.
        // Returns Ok(true) if the iterator is exhausted, Ok(false) otherwise.
        let refill = |buf: &mut Vec<String>| -> PyResult<bool> {
            pyo3::Python::with_gil(|py| {
                buf.clear();
                let it = py_iter.bind(py);
                loop {
                    if buf.len() >= buffer_size {
                        return Ok(false);
                    }
                    // next(it)
                    // SAFETY: PyIter_Next requires the GIL (held inside
                    // with_gil) and a valid iterator pointer. It returns an
                    // owned reference, or NULL for either exhaustion or an
                    // error — disambiguated below via PyErr::occurred.
                    let next_obj = unsafe {
                        pyo3::Bound::from_owned_ptr_or_opt(py, pyo3::ffi::PyIter_Next(it.as_ptr()))
                    };
                    match next_obj {
                        Some(obj) => {
                            let s: String = obj.extract()?;
                            buf.push(s);
                        }
                        None => {
                            if pyo3::PyErr::occurred(py) {
                                return Err(pyo3::PyErr::fetch(py));
                            } else {
                                return Ok(true); // exhausted
                            }
                        }
                    }
                }
            })
        };

        // Stream ingestion loop: refill under GIL, process without GIL (parallel)
        loop {
            let exhausted = refill(&mut buf)?;
            if buf.is_empty() && exhausted {
                break;
            }

            total_sequences += buf.len() as u64;

            // Clone the compiled regex so worker threads don't borrow `self`.
            let pattern = self.compiled_pattern.clone();
            let local: AHashMap<CompactString, i32> = py.allow_threads(|| {
                buf.par_iter()
                    .map(|s| {
                        // Per-string chunk histogram, merged in reduce below.
                        let mut m: AHashMap<CompactString, i32> = AHashMap::new();
                        for mat in pattern.find_iter(s) {
                            let piece = mat.expect("regex match failed").as_str();
                            *m.entry(CompactString::from(piece)).or_default() += 1;
                        }
                        m
                    })
                    .reduce(
                        || AHashMap::new(),
                        |mut a, b| {
                            for (k, v) in b {
                                *a.entry(k).or_default() += v;
                            }
                            a
                        },
                    )
            });

            // Merge local into global (single-threaded)
            for (k, v) in local {
                *counts.entry(k).or_default() += v;
            }

            if exhausted {
                break;
            }
        }
        log::info!("Processed {} sequences total, {} unique", total_sequences, counts.len());

        // Materialize words & counts: each unique chunk becomes a Word of
        // raw byte ids (0..256), weighted by how often the chunk occurred.
        let mut words = Vec::with_capacity(counts.len());
        let mut cvec = Vec::with_capacity(counts.len());
        for (chunk, c) in counts.into_iter() {
            words.push(Word::new(chunk.as_bytes().iter().map(|&b| b as u32).collect()));
            cvec.push(c);
        }

        self.train_core_incremental(words, cvec, vocab_size);
        Ok(())
    }
|
||||
|
||||
/// Return the regex pattern
|
||||
pub fn get_pattern(&self) -> String {
|
||||
self.pattern.clone()
|
||||
}
|
||||
|
||||
/// Return the mergeable ranks (token bytes -> token id / rank)
|
||||
pub fn get_mergeable_ranks(&self) -> Vec<(Vec<u8>, u32)> {
|
||||
let mut mergeable_ranks = Vec::new();
|
||||
|
||||
// Build vocabulary incrementally from low to high token IDs
|
||||
let mut token_bytes: Vec<Vec<u8>> = (0..256_u32).map(|i| vec![i as u8]).collect();
|
||||
|
||||
for (i, bytes) in token_bytes.iter().enumerate() {
|
||||
mergeable_ranks.push((bytes.clone(), i as u32));
|
||||
}
|
||||
|
||||
// Sort merges by token id (so we can reconstruct bytes progressively)
|
||||
let mut sorted_merges: Vec<_> = self.merges.iter().collect();
|
||||
sorted_merges.sort_by_key(|&(_, &token_id)| token_id);
|
||||
|
||||
for (&pair, &merged_id) in sorted_merges {
|
||||
let (left, right) = pair;
|
||||
let mut merged_bytes = token_bytes[left as usize].clone();
|
||||
merged_bytes.extend(&token_bytes[right as usize]);
|
||||
|
||||
if token_bytes.len() <= merged_id as usize {
|
||||
token_bytes.resize(merged_id as usize + 1, Vec::new());
|
||||
}
|
||||
token_bytes[merged_id as usize] = merged_bytes.clone();
|
||||
|
||||
mergeable_ranks.push((merged_bytes, merged_id));
|
||||
}
|
||||
|
||||
mergeable_ranks
|
||||
}
|
||||
|
||||
/// Encode a string into token IDs
|
||||
pub fn encode(&self, text: &str) -> Vec<u32> {
|
||||
let mut all_ids = Vec::new();
|
||||
|
||||
// Split text using the regex pattern
|
||||
for m in self.compiled_pattern.find_iter(text) {
|
||||
let chunk = m.expect("regex match failed").as_str();
|
||||
|
||||
// Convert chunk to bytes then to u32 IDs
|
||||
let mut ids: Vec<u32> = chunk.bytes().map(|b| b as u32).collect();
|
||||
|
||||
// Apply merges iteratively
|
||||
while ids.len() >= 2 {
|
||||
// Find the best pair to merge
|
||||
let mut best_pair: Option<(usize, Pair, u32)> = None;
|
||||
|
||||
for i in 0..ids.len() - 1 {
|
||||
let pair: Pair = (ids[i], ids[i + 1]);
|
||||
if let Some(&new_id) = self.merges.get(&pair) {
|
||||
if best_pair.is_none() || new_id < best_pair.unwrap().2 {
|
||||
best_pair = Some((i, pair, new_id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we found a pair to merge, apply it
|
||||
if let Some((idx, _pair, new_id)) = best_pair {
|
||||
ids[idx] = new_id;
|
||||
ids.remove(idx + 1);
|
||||
} else {
|
||||
// No more merges possible
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
all_ids.extend(ids);
|
||||
}
|
||||
|
||||
all_ids
|
||||
}
|
||||
}
|
||||
|
||||
/// Python extension module entry point: registers the `Tokenizer` class and
/// bridges Rust logging into Python's `logging` module.
#[pymodule]
fn rustbpe(m: &Bound<'_, PyModule>) -> PyResult<()> {
    pyo3_log::init(); // forwards Rust `log` to Python's `logging`
    m.add_class::<Tokenizer>()?;
    Ok(())
}
|
||||
Reference in New Issue
Block a user