@@ -149,7 +149,7 @@ defmodule Nx.LinAlg do
149149
150150 out = % { tensor | type: output_type , shape: output_shape , names: output_names }
151151
152- Nx . block ( % Nx.Block.Cholesky { } , [ tensor ] , out , fn % Nx.Block.Cholesky { } , t ->
152+ Nx . block ( % Nx.Block.LinAlg.Cholesky { } , [ tensor ] , out , fn % Nx.Block.LinAlg.Cholesky { } , t ->
153153 Nx.LinAlg.Cholesky . cholesky ( t )
154154 end )
155155 |> Nx . vectorize ( vectorized_axes )
@@ -715,7 +715,7 @@ defmodule Nx.LinAlg do
715715 output = Nx . template ( output_shape , output_type )
716716
717717 result =
718- Nx . block ( % Nx.Block.Solve { } , [ a , b ] , output , fn % Nx.Block.Solve { } , a , b ->
718+ Nx . block ( % Nx.Block.LinAlg.Solve { } , [ a , b ] , output , fn % Nx.Block.LinAlg.Solve { } , a , b ->
719719 # Since we have triangular solve, which accepts upper
720720 # triangular matrices with the `lower: false` option,
721721 # we can solve a system as follows:
@@ -1155,7 +1155,8 @@ defmodule Nx.LinAlg do
11551155 names: List . duplicate ( nil , tuple_size ( r_shape ) )
11561156 } }
11571157
1158- Nx . block ( struct! ( Nx.Block.QR , opts ) , [ tensor ] , output , fn % Nx.Block.QR { } = s , t ->
1158+ Nx . block ( struct! ( Nx.Block.LinAlg.QR , opts ) , [ tensor ] , output , fn % Nx.Block.LinAlg.QR { } = s ,
1159+ t ->
11591160 opts = s |> Map . from_struct ( ) |> Map . to_list ( )
11601161 Nx.LinAlg.QR . qr ( t , opts )
11611162 end )
@@ -1402,7 +1403,8 @@ defmodule Nx.LinAlg do
14021403 { % { tensor | names: eigenvals_name , type: output_type , shape: eigenvals_shape } ,
14031404 % { tensor | names: eigenvecs_name , type: output_type , shape: eigenvecs_shape } }
14041405
1405- Nx . block ( struct! ( Nx.Block.Eigh , opts ) , [ tensor ] , output , fn % Nx.Block.Eigh { } , t ->
1406+ Nx . block ( struct! ( Nx.Block.LinAlg.Eigh , opts ) , [ tensor ] , output , fn % Nx.Block.LinAlg.Eigh { } ,
1407+ t ->
14061408 Nx.LinAlg.BlockEigh . eigh ( t , opts )
14071409 end )
14081410 |> Nx . vectorize ( vectorized_axes )
@@ -1523,7 +1525,7 @@ defmodule Nx.LinAlg do
15231525 % { tensor | names: List . duplicate ( nil , rank - 1 ) , type: output_type , shape: s_shape } ,
15241526 % { tensor | names: List . duplicate ( nil , rank ) , type: output_type , shape: v_shape } }
15251527
1526- Nx . block ( struct! ( Nx.Block.SVD , opts ) , [ tensor ] , output , fn % Nx.Block.SVD { } , t ->
1528+ Nx . block ( struct! ( Nx.Block.LinAlg.SVD , opts ) , [ tensor ] , output , fn % Nx.Block.LinAlg.SVD { } , t ->
15271529 Nx.LinAlg.SVD . svd ( t , opts )
15281530 end )
15291531 |> Nx . vectorize ( vectorized_axes )
@@ -1748,7 +1750,7 @@ defmodule Nx.LinAlg do
17481750 % { tensor | type: output_type , shape: l_shape , names: names } ,
17491751 % { tensor | type: output_type , shape: u_shape , names: names } }
17501752
1751- Nx . block ( % Nx.Block.LU { } , [ tensor ] , output , fn % Nx.Block.LU { } , t ->
1753+ Nx . block ( % Nx.Block.LinAlg.LU { } , [ tensor ] , output , fn % Nx.Block.LinAlg.LU { } , t ->
17521754 Nx.LinAlg.LU . lu ( t )
17531755 end )
17541756 |> Nx . vectorize ( vectorized_axes )
@@ -2001,18 +2003,23 @@ defmodule Nx.LinAlg do
20012003 "determinant/1 expects a square tensor, got tensor with shape: #{ inspect ( shape ) } "
20022004 end
20032005
2004- Nx . block ( % Nx.Block.Determinant { } , [ tensor ] , output , fn % Nx.Block.Determinant { } , t ->
2005- case matrix_shape do
2006- [ 2 , 2 ] ->
2007- determinant_2by2 ( t )
2006+ Nx . block (
2007+ % Nx.Block.LinAlg.Determinant { } ,
2008+ [ tensor ] ,
2009+ output ,
2010+ fn % Nx.Block.LinAlg.Determinant { } , t ->
2011+ case matrix_shape do
2012+ [ 2 , 2 ] ->
2013+ determinant_2by2 ( t )
20082014
2009- [ 3 , 3 ] ->
2010- determinant_3by3 ( t )
2015+ [ 3 , 3 ] ->
2016+ determinant_3by3 ( t )
20112017
2012- [ n , n ] ->
2013- determinant_NbyN ( t , batch_shape_n: List . to_tuple ( batch_shape ++ [ n ] ) )
2018+ [ n , n ] ->
2019+ determinant_NbyN ( t , batch_shape_n: List . to_tuple ( batch_shape ++ [ n ] ) )
2020+ end
20142021 end
2015- end )
2022+ )
20162023 end )
20172024 end
20182025
0 commit comments