Old assembly test #48

Draft
wants to merge 19 commits into base: dev
7 changes: 4 additions & 3 deletions Project.toml
@@ -54,10 +54,10 @@ PartitionedArrays = "5a9dfac6-5c52-46f7-8278-5e2210713be9"
JutulGLMakieExt = "GLMakie"
JutulGraphMakieExt = ["GraphMakie", "NetworkLayout", "LayeredLayouts", "Makie"]
JutulHypreExt = "HYPRE"
JutulKaHyParExt = "KaHyPar"
JutulMakieExt = "Makie"
JutulPartitionedArraysExt = ["PartitionedArrays", "MPI"]
JutulMeshesExt = ["Meshes"]
JutulKaHyParExt = "KaHyPar"
JutulPartitionedArraysExt = ["PartitionedArrays", "MPI"]

[compat]
AlgebraicMultigrid = "0.5.1"
@@ -70,8 +70,8 @@ Graphs = "1.8.0"
HYPRE = "1.4.0"
ILUZero = "0.2.0"
JLD2 = "0.4.22"
Krylov = "0.9.1"
KaHyPar = "0.3.0"
Krylov = "0.9.1"
LayeredLayouts = "0.2.5"
LinearOperators = "2.3.2"
LoopVectorization = "0.12.115"
@@ -90,6 +90,7 @@ PrettyTables = "2.0"
ProgressMeter = "1.7.2"
SparsityTracing = "0.2.4"
StaticArrays = "1.5.17"
Statistics = "1"
SymRCM = "0.2.1"
TimerOutputs = "0.5.19"
Tullio = "0.3.4"
1 change: 0 additions & 1 deletion ext/JutulPartitionedArraysExt/krylov.jl
@@ -49,7 +49,6 @@ function inner_krylov(bsolver, lsolve, simulator, simulators, cfg, b, verbose, a
t_prec = @elapsed P = parray_preconditioner_linear_operator(simulator, lsolve, b)
@tic "communication" consistent!(b) |> wait


max_it = cfg.max_iterations
@tic "solve" Krylov.bicgstab!(
bsolver, op, b,
33 changes: 28 additions & 5 deletions src/conservation/conservation.jl
@@ -503,14 +503,25 @@ function update_half_face_flux_tpfa!(hf_cells::Union{AbstractArray{SVector{N, T}
M = global_map(model.domain)
nc = length(conn_pos)-1
tb = minbatch(model.context, nc)
@tic "flux (cells)" @batch minbatch=tb for c in 1:nc
self = full_cell(c, M)
state_c = new_entity_index(state, self)
update_half_face_flux_tpfa_internal!(hf_cells, eq, state_c, model, dt, flow_disc, conn_pos, conn_data, c)
if false
# Cell by cell version
@tic "flux (cells)" @batch minbatch=tb for c in 1:nc
self = full_cell(c, M)
state_c = new_entity_index(state, self)
update_half_face_flux_tpfa_internal_cell!(hf_cells, eq, state_c, model, dt, flow_disc, conn_pos, conn_data, c)
end
else
# Face by face version
Nf = flow_disc.face_neighborship
nf = length(Nf)
f2hf = flow_disc.face_to_half_face
@tic "flux (cells)" @batch minbatch=tb for f in 1:nf
update_half_face_flux_tpfa_internal_face!(hf_cells, eq, state, model, dt, flow_disc, M, Nf, f2hf, f)
end
end
end

function update_half_face_flux_tpfa_internal!(hf_cells::AbstractArray{T}, eq, state, model, dt, flow_disc, conn_pos, conn_data, c) where T
function update_half_face_flux_tpfa_internal_cell!(hf_cells::AbstractArray{T}, eq, state, model, dt, flow_disc, conn_pos, conn_data, c) where T
start = @inbounds conn_pos[c]
stop = @inbounds conn_pos[c+1]-1
for i in start:stop
@@ -519,6 +530,18 @@ function update_half_face_flux_tpfa_internal!(hf_cells::AbstractArray{T}, eq, st
end
end

function update_half_face_flux_tpfa_internal_face!(hf_cells::AbstractArray{T}, eq, state, model, dt, flow_disc, M, N, f2hf, f) where T
@inbounds l, r = N[f]
@inbounds l_hf, r_hf = f2hf[f]
internal_face_update(hf_cells, l_hf, l, r, f, 1, eq, new_entity_index(state, l), model, dt, flow_disc, M)
internal_face_update(hf_cells, r_hf, r, l, f, -1, eq, new_entity_index(state, r), model, dt, flow_disc, M)
end

@inline function internal_face_update(hf_cells::AbstractArray{T}, hf, self, other, face, face_sign, eq, state, model, dt, flow_disc, M) where T
self = full_cell(self, M)
@inbounds hf_cells[hf] = face_flux!(zero(T), self, other, face, face_sign, eq, state, model, dt, flow_disc)
end

function update_half_face_flux_tpfa!(hf_faces::AbstractArray{SVector{N, T}}, eq, state, model, dt, flow_disc, ::Faces) where {T, N}
nf = number_of_faces(model.domain)
pr = physical_representation(model.domain)
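Note on the conservation.jl change above: the half-face flux assembly is switched from a cell-by-cell loop to a face-by-face loop, where each interior face is visited once and its value is written into the two half-face slots of its neighboring cells. In the PR itself, `face_flux!` is evaluated once per side with opposite `face_sign` and a state re-indexed to that side's cell, presumably so each half-face keeps derivatives with respect to its own cell. The sketch below is a minimal standalone illustration of the lookup pattern only, not Jutul code: `assemble_half_face_fluxes!`, the toy `flux` closure and the example data are hypothetical, and the flux is computed once and sign-flipped for simplicity.

```julia
# Standalone sketch of face-by-face half-face assembly (hypothetical names).
# fn[f]   = (left cell, right cell) of face f
# f2hf[f] = (position of left half-face, position of right half-face) in hf
function assemble_half_face_fluxes!(hf, flux, fn, f2hf)
    for f in eachindex(fn)
        l, r = fn[f]
        l_hf, r_hf = f2hf[f]
        q = flux(l, r, f)   # flux oriented from l to r
        hf[l_hf] = q        # as seen from the left cell
        hf[r_hf] = -q       # as seen from the right cell: opposite sign
    end
    return hf
end

# Tiny example: a 1D grid with three cells and two interior faces
fn = [(1, 2), (2, 3)]
f2hf = [(1, 2), (3, 4)]
p = [3.0, 2.0, 1.0]                  # per-cell potential
flux(l, r, f) = p[l] - p[r]          # two-point potential difference
hf = zeros(4)
assemble_half_face_fluxes!(hf, flux, fn, f2hf)   # -> [1.0, -1.0, 1.0, -1.0]
```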
38 changes: 33 additions & 5 deletions src/conservation/flux.jl
@@ -111,10 +111,12 @@ function remap_connection(conn::T, self::I, other::I, face::I) where {T, I<:Inte
return conn
end

struct TwoPointPotentialFlowHardCoded{C, D} <: FlowDiscretization
struct TwoPointPotentialFlowHardCoded{C, D, F} <: FlowDiscretization
gravity::Bool
conn_pos::C
conn_data::D
face_to_half_face::F
face_neighborship::F
end

function TwoPointPotentialFlowHardCoded(grid::JutulMesh)
@@ -140,7 +142,34 @@ function TwoPointPotentialFlowHardCoded(N::AbstractMatrix, nc = maximum(N))
conn_data = []
face_pos = ones(Int64, nc+1)
end
return TwoPointPotentialFlowHardCoded{typeof(face_pos), typeof(conn_data)}(true, face_pos, conn_data)
f2hf, fn = __generate_hard_coded_hf_map(N, face_pos, conn_data)
return TwoPointPotentialFlowHardCoded{typeof(face_pos), typeof(conn_data), typeof(f2hf)}(true, face_pos, conn_data, f2hf, fn)
end

function __generate_hard_coded_hf_map(N, face_pos, conn_data)
function findface(cell, face)
for fpos = face_pos[cell]:(face_pos[cell+1]-1)
if conn_data[fpos].face == face
return fpos
end
end
error("This should not occur - bad data")
end

nf = size(N, 2)
if nf > 0
fn = map(i -> (N[1, i], N[2, i]), 1:size(N, 2))

f2hf = similar(fn)
for i in eachindex(f2hf)
l, r = fn[i]
f2hf[i] = (findface(l, i), findface(r, i))
end
else
f2hf = Vector{Tuple{Int64, Int64}}() # tuple of positions in half face map
fn = similar(f2hf) # tuple of left, right cell neighbors for each face
end
return (f2hf, fn)
end

number_of_half_faces(tp::TwoPointPotentialFlowHardCoded) = length(tp.conn_data)
@@ -166,9 +195,8 @@ function subdiscretization(disc::TwoPointPotentialFlowHardCoded, subg, mapper::F

conn_data = conn_data_subdisc(face_pos, faces, face_pos_global, next_face_pos, conn_data_global::Vector{T}, mapper, nc)
face_pos = next_face_pos
# face_pos = new_offsets
# conn_data = vcat(new_conn...)
return TwoPointPotentialFlowHardCoded{typeof(face_pos), typeof(conn_data)}(has_grav, face_pos, conn_data)
f2hf, fn = __generate_hard_coded_hf_map(N, face_pos, conn_data)
return TwoPointPotentialFlowHardCoded{typeof(face_pos), typeof(conn_data), typeof(f2hf)}(has_grav, face_pos, conn_data, f2hf, fn)
end

function compute_counts_subdisc(face_pos, faces, face_pos_global, conn_data_global, mapper, nc)
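Note on the flux.jl change above: the new `__generate_hard_coded_hf_map` helper derives two per-face tables from the neighborship matrix `N` and the existing half-face layout, `fn[f]` being the (left, right) cell pair of face `f` and `f2hf[f]` the positions of those two half-faces in `conn_data`. The sketch below mirrors that construction with hypothetical names and a plain vector of per-half-face face indices instead of Jutul's `conn_data` records; it is only meant to show the shape of the output.

```julia
# Standalone sketch (hypothetical names): build face -> half-face positions from a
# 2 x nf neighborship matrix N, CSR offsets face_pos (half-faces of cell c live at
# face_pos[c]:(face_pos[c+1]-1)) and conn_face (global face index of each half-face).
function face_to_half_face_map(N::AbstractMatrix{<:Integer}, face_pos, conn_face)
    function find_hf(cell, face)
        for pos in face_pos[cell]:(face_pos[cell+1] - 1)
            conn_face[pos] == face && return pos
        end
        error("face $face not found among the half-faces of cell $cell")
    end
    nf = size(N, 2)
    fn = [(N[1, f], N[2, f]) for f in 1:nf]
    f2hf = [(find_hf(fn[f][1], f), find_hf(fn[f][2], f)) for f in 1:nf]
    return f2hf, fn
end

# Two cells sharing face 1: cell 1 owns half-face 1, cell 2 owns half-face 2
N = reshape([1, 2], 2, 1)
face_pos = [1, 2, 3]
conn_face = [1, 1]
face_to_half_face_map(N, face_pos, conn_face)   # -> ([(1, 2)], [(1, 2)])
```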
1 change: 1 addition & 0 deletions src/core_types/domains.jl
@@ -64,6 +64,7 @@ function hasentity(d::Union{DataDomain, DiscretizedDomain}, e)
return haskey(d.entities, e)
end


function declare_entities(domain::DataDomain)
return ((entity = key, count = val) for (key, val) in domain.entities)
end
2 changes: 0 additions & 2 deletions src/meshes/cart.jl
@@ -198,8 +198,6 @@ function tpfv_geometry(g::CartesianMesh)
end
end
end


nbnd = number_of_boundary_faces(g)
# Then fix the boundary
boundary_neighbors = Vector{Int}(undef, nbnd)
1 change: 1 addition & 0 deletions test/utils.jl
@@ -237,3 +237,4 @@ end
@test isdir(jutul_output_path())
@test last(splitdir(jutul_output_path("testname"))) == "testname"
end
