- addmm
- bmm
- matmul
- mm
- cumsum
- linear
- dropout
- gelu
- layer_norm
- native_dropout
- native_layer_norm
- relu
- silu
- softmax
- triu
- abs
- add
- div
- exp
- mul
- pow
- reciprocal
- rsqrt
- rsub
- sub
- mean
- mv
- all
- any
- bitwise_and
- bitwise_not
- bitwise_or
- cos
- eq
- ge
- gt
- isinf
- isnan
- le
- lt
- ne
- neg
- or
- sin
- tanh
- amax
- argmax
- clamp
- max
- min
- outer
- prod
- sum
- var_mean
- vector_norm
- cross_entropy_loss
- group_norm
- log_softmax
- native_group_norm
- sigmoid
- _conv_depthwise2d
- _convolution
- conv1d
- conv2d
- convolution
- cudnn_convolution
- multinomial
- nonzero
- normal
- rand
- rand_like
- randn
- topk
- uniform_
- _efficient_attention_forward
- _scaled_dot_product_efficient_attention
- embedding
- nll_loss
- nll_loss_forward
- nll_loss_nd
- scaled_dot_product_attention
- upsample_nearest2d
- _fft_c2r
- _fft_r2c
- conj_physical
- erf
- fft_irfft
- fft_rfft
- resolve_conj
- resolve_neg
- arange
- cat
- chunk
- concat
- constant_pad_nd
- contiguous
- copy_
- fill
- flip
- full
- full_like
- gather
- hstack
- index_put_
- index_select
- masked_fill
- narrow
- ones
- pad
- permute
- repeat
- repeat_interleave
- resize
- scatter
- select
- select_scatter
- slice
- slice_scatter
- sort
- split
- split_with_sizes
- stack
- tile
- transpose
- unfold
- where
- zeros
- rms_norm
- skip_layernorm
- skip_rmsnorm
- apply_rotary_position_embedding
- silu_and_mul
- gelu_and_mul