# CODEOWNERS file for tpu-inference
# This file defines code ownership for different parts of the repository.
# Each line is a file pattern followed by one or more owners.
# Owners are notified when PRs modify code in their areas.
#
# Order matters - the last matching pattern takes precedence.
# Ownership assignments are based on the full commit history of both the
# tpu_commons and tpu_inference paths.
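#
# Illustrative example of how precedence works (hypothetical paths and owner
# handles; these lines are comments and do not assign any ownership):
#
#   /tpu_inference/          @example-default-team
#   /tpu_inference/kernels/  @example-kernels-team
#
# A PR touching /tpu_inference/kernels/foo.py would request review only from
# @example-kernels-team, because the later matching pattern wins.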

# Default owners for everything in the repo (fallback)
* @vipannalla

# CI/CD and Build Configuration
/.buildkite/ @jcyang43
/.github/ @jcyang43

# Documentation
/docs/ @bvrockwell
/README.md @bvrockwell
/CONTRIBUTING.md @jrplatin @bvrockwell

# Distributed Computing
/tpu_inference/distributed/ @mrjunwan-lang @xiangxu-google

# Kernel Implementations (Performance-critical)
/tpu_inference/kernels/ @kyuyeunk @yaochengji @bythew3i

# Model Layers (JAX and vLLM)
/tpu_inference/layers/jax/ @py4 @bzgoogle @jrplatin @gpolovets1
/tpu_inference/layers/vllm/ @hfan @vanbasten23 @lsy323

# Model Implementations (JAX and vLLM)
/tpu_inference/models/jax/qwen2_5_vl.py @hfan @KWang1998
/tpu_inference/models/jax/gpt_oss.py @bzgoogle
/tpu_inference/models/jax/deepseek_v3.py @bzgoogle
/tpu_inference/models/vllm/ @hfan @vanbasten23 @lsy323

# Runner and Execution
/tpu_inference/runner/ @py4 @xiangxu-google
/tpu_inference/runner/tpu_jax_runner.py @py4 @xiangxu-google @sixiang-google
/tpu_inference/runner/persistent_batch_manager.py @py4 @xiangxu-google
/tpu_inference/runner/speculative_decoding_manager.py @py4 @Lumosis
/tpu_inference/executors/ @xiangxu-google @mrjunwan-lang
/tpu_inference/core/ @sixiang-google

# Worker Management
/tpu_inference/worker/ @py4 @xiangxu-google @lsy323 @vanbasten23

# Speculative Decoding
/tpu_inference/spec_decode/ @py4 @Lumosis

# Platform Support
/tpu_inference/platforms/ @xiangxu-google @lsy323

# LoRA and Adapters
/tpu_inference/lora/ @vanbasten23
/tpu_inference/runner/lora_utils.py @vanbasten23

# Docker Configuration
/docker/ @jrplatin