
Commit e43604d

Implement QR factorization with Householder reflections
- Add QR factorization implementation using Householder reflections
- Create QRFactorizationSolution struct with orthogonality checks
- Implement kernel method for QR decomposition
- Include QR factorization in module and test suite
1 parent 490ea4e commit e43604d

4 files changed: 59 additions & 22 deletions

src/SimpleLinearAlgebra.jl

Lines changed: 1 addition & 0 deletions
@@ -18,4 +18,5 @@ module SimpleLinearAlgebra
 include("back-substitution.jl")
 include("lu-factorization.jl")
 include("partial-pivoting-lu-factorization.jl")
+include("qr-factorization.jl")
 end

src/qr-factorization.jl

Lines changed: 34 additions & 22 deletions

@@ -1,10 +1,18 @@
+# QR factorization
+#
+# A = QR
+#
+# where Q is orthogonal and R is upper triangular
+
 struct QRFactorizationSolution <: SolutionMixin
     q::AbstractMatrix
     r::AbstractMatrix
     function QRFactorizationSolution(q, r)
-        sol = new(q, r)
-        @assert isortho(q) "matrix must be orthogonal"
-        @assert istriu(r) "matrix must be upper triangular"
+        sol = new(q, triu(r))
+
+        n = size(q, 1)
+        i = Matrix{Float64}(I, n, n)
+        @assert q * q' ≈ i "matrix must be orthogonal"
         return sol
     end
 end
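
The constructor now materializes an identity matrix and checks Q Qᵀ against it, replacing the removed isortho assertion. As a standalone sketch of that check (the helper name isortho comes from the removed line; its body and the atol default here are assumptions, not the package's actual implementation):

    using LinearAlgebra

    # Hypothetical standalone version of the constructor's check:
    # Q is orthogonal iff Q * Q' is numerically the identity.
    isortho(q; atol = 1e-10) =
        isapprox(q * q', Matrix{Float64}(I, size(q, 1), size(q, 1)); atol = atol)

    isortho(Matrix{Float64}(I, 3, 3))   # true
    isortho([1.0 1.0; 0.0 1.0])         # false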
@@ -15,28 +23,32 @@
 
 QRFactorization = QRFactorizationProblem
 
-function householder_reflection!(q, r)
-    m, n = size(r)
-
-    if m == 1
-        return q, r
-    else
-        h = Matrix{Float64}(I, m, m)
-        for i in 1:m-1
-            h[i+1:m, i] = -2 * r[i+1:m, i] / r[i, i]
-        end
-        q = h * q
-        r = h * r
-        householder_reflection!(q, r)
-    end
-end
-
 function kernel(prob::QRFactorizationProblem)
+    # QR factorization
+    # A = QR
+    #
+    # where H_i is a householder reflection matrix
+    # Q = H₁ H₂ ... Hₖ
+
+    n = size(prob.a, 1)
+    q = Matrix{Float64}(I, n, n)
     r = copy(prob.a)
-    n = size(r, 1)
-    q = Matrix{Float64}(I, n, n)
+
+    for k in 1:n
+        # Get the column vector below and including the diagonal
+        v = copy(r[k:n, k])
 
+        h = Matrix{Float64}(I, n, n)
+        beta = -sign(v[1]) * norm(v)
+        v[1] -= beta
+        h[k:n, k:n] -= 2.0 * v * v' / dot(v, v)
 
+        # Apply reflection to R and update Q
+        r = h * r
+        q = q * h
+    end
+
+    return QRFactorizationSolution(q, r)
 end
 
-export PartialPivotingLUFactorization
+export QRFactorization
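
Each loop iteration embeds a Householder reflector in the trailing k:n block of an n×n identity and applies it from the left, zeroing column k below the diagonal. Because every reflector is symmetric and orthogonal (H² = I), accumulating q = q * h yields Q = H₁H₂···Hₙ with QR = A. A self-contained sketch of a single reflection, with a 2×2 matrix made up for illustration:

    using LinearAlgebra

    a = [2.0 1.0;
         1.0 3.0]                       # example matrix (made up for illustration)
    v = copy(a[:, 1])                   # column to zero out below the diagonal
    beta = -sign(v[1]) * norm(v)        # sign choice avoids cancellation in v[1]
    v[1] -= beta                        # v = x - beta * e1
    h = I - 2.0 * v * v' / dot(v, v)    # reflector H = I - 2vv' / v'v

    h * a                               # first column becomes [beta, 0]

One caveat: Julia's sign(0) is 0, so an exactly zero diagonal entry gives beta = 0 and the reflection leaves that column untriangularized; a common remedy is beta = -copysign(norm(v), v[1]).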

test/qr-factorization.jl

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
+# QR factorization
+# A = QR
+#
+# where Q is orthogonal and R is upper triangular
+
+@testset "QR factorization" begin
+    n = 10
+    tol = 1e-10
+    a = rand(n, n)
+
+    p = QRFactorization(a)
+    s = kernel(p)
+
+    q = s.q
+    r = s.r
+
+    @test istriu(r)
+    @test q * q' ≈ Matrix{Float64}(I, n, n)
+    @test maximum(abs.(a - q * r)) < tol
+end
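
Outside the test suite, the result can be cross-checked against LinearAlgebra.qr. The factors may differ by column signs, so compare the reconstructed products rather than Q and R entrywise (a sketch, assuming the package's QRFactorization and kernel are in scope):

    using LinearAlgebra
    # using SimpleLinearAlgebra        # assumed to be available locally

    a = rand(5, 5)
    s = kernel(QRFactorization(a))
    f = qr(a)

    @assert s.q * s.r ≈ a              # Householder factorization reproduces A
    @assert Matrix(f.Q) * f.R ≈ a      # stock routine agrees on the product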

test/runtests.jl

Lines changed: 4 additions & 0 deletions
@@ -18,3 +18,7 @@ end
 @testset "Partial pivoting LU factorization" begin
     include("partial-pivoting-lu-factorization.jl")
 end
+
+@testset "QR factorization" begin
+    include("qr-factorization.jl")
+end
