Skip to content

Commit e57223b

Browse files
committed
fix formatting
1 parent 1deb196 commit e57223b

File tree

7 files changed

+30
-15
lines changed

7 files changed

+30
-15
lines changed

src/frontend/fit/summary.jl

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -160,8 +160,7 @@ function sem_summary(
160160

161161
var_array = reduce(
162162
hcat,
163-
check_round(partable.columns[c][var_indices]; digits = digits) for
164-
c in var_columns
163+
check_round(partable.columns[c][var_indices]; digits = digits) for c in var_columns
165164
)
166165
var_columns[2] = Symbol("")
167166

src/frontend/specification/ParameterTable.jl

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -235,8 +235,11 @@ sort_vars(partable::ParameterTable) = sort_vars!(deepcopy(partable))
235235
# add a row --------------------------------------------------------------------------------
236236

237237
function Base.push!(partable::ParameterTable, d::Union{AbstractDict{Symbol}, NamedTuple})
238-
issetequal(keys(partable.columns), keys(d)) ||
239-
throw(ArgumentError("The new row needs to have the same keys as the columns of the parameter table."))
238+
issetequal(keys(partable.columns), keys(d)) || throw(
239+
ArgumentError(
240+
"The new row needs to have the same keys as the columns of the parameter table.",
241+
),
242+
)
240243
for (key, val) in pairs(d)
241244
push!(partable.columns[key], val)
242245
end

src/imply/RAM/generic.jl

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,11 @@ function RAM(;
144144
# μ
145145
if meanstructure
146146
has_meanstructure = Val(true)
147-
!isnothing(M_indices) || throw(ArgumentError("You set `meanstructure = true`, but your model specification contains no mean parameters."))
147+
!isnothing(M_indices) || throw(
148+
ArgumentError(
149+
"You set `meanstructure = true`, but your model specification contains no mean parameters.",
150+
),
151+
)
148152
∇M = gradient ? matrix_gradient(M_indices, n_var) : nothing
149153
μ = zeros(n_obs)
150154
else

src/imply/abstract.jl

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ nparams(imply::SemImply) = nparams(imply.ram_matrices)
1212

1313
function check_acyclic(A::AbstractMatrix)
1414
# check if the model is acyclic
15-
acyclic = isone(det(I-A))
15+
acyclic = isone(det(I - A))
1616

1717
# check if A is lower or upper triangular
1818
if istril(A)
@@ -23,8 +23,9 @@ function check_acyclic(A::AbstractMatrix)
2323
return UpperTriangular(A)
2424
else
2525
if acyclic
26-
@info "Your model is acyclic, specifying the A Matrix as either Upper or Lower Triangular can have great performance benefits.\n" maxlog=1
26+
@info "Your model is acyclic, specifying the A Matrix as either Upper or Lower Triangular can have great performance benefits.\n" maxlog =
27+
1
2728
end
2829
return A
2930
end
30-
end
31+
end

src/loss/ML/FIML.jl

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,8 @@ function gradient!(semfiml::SemFIML, params, model)
100100

101101
prepare_SemFIML!(semfiml, model)
102102

103-
gradient = ∇F_FIML(pattern_rows(observed(model)), semfiml, model) / nsamples(observed(model))
103+
gradient =
104+
∇F_FIML(pattern_rows(observed(model)), semfiml, model) / nsamples(observed(model))
104105
return gradient
105106
end
106107

@@ -112,8 +113,10 @@ function objective_gradient!(semfiml::SemFIML, params, model)
112113
prepare_SemFIML!(semfiml, model)
113114

114115
objective =
115-
F_FIML(pattern_rows(observed(model)), semfiml, model, params) / nsamples(observed(model))
116-
gradient = ∇F_FIML(pattern_rows(observed(model)), semfiml, model) / nsamples(observed(model))
116+
F_FIML(pattern_rows(observed(model)), semfiml, model, params) /
117+
nsamples(observed(model))
118+
gradient =
119+
∇F_FIML(pattern_rows(observed(model)), semfiml, model) / nsamples(observed(model))
117120

118121
return objective, gradient
119122
end

src/observed/EM.jl

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ function em_mvn(
6262

6363
if iter > max_iter_em
6464
done = true
65-
@warn "EM Algorithm for MVN missing data did not converge. Likelihood for FIML is not interpretable.
65+
@warn "EM Algorithm for MVN missing data did not converge. Likelihood for FIML is not interpretable.
6666
Maybe try passing different starting values via 'start_em = ...' "
6767
elseif iter > 1
6868
# done = isapprox(ll, ll_prev; rtol = rtol)
@@ -153,7 +153,8 @@ end
153153

154154
# use μ and Σ of full cases
155155
function start_em_observed(observed::SemObservedMissing; kwargs...)
156-
if (length(observed.patterns[1]) == nobserved_vars(observed)) & (observed.pattern_nsamples[1] > 1)
156+
if (length(observed.patterns[1]) == nobserved_vars(observed)) &
157+
(observed.pattern_nsamples[1] > 1)
157158
μ = copy(observed.obs_mean[1])
158159
Σ = copy(Symmetric(observed.obs_cov[1]))
159160
if !isposdef(Σ)

test/examples/political_democracy/by_parts.jl

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,12 @@ model_ridge = Sem(observed, imply_ram, SemLoss(ml, ridge), optimizer_obj)
4040

4141
model_constant = Sem(observed, imply_ram, SemLoss(ml, constant), optimizer_obj)
4242

43-
model_ml_weighted =
44-
Sem(observed, imply_ram, SemLoss(ml; loss_weights = [nsamples(model_ml)]), optimizer_obj)
43+
model_ml_weighted = Sem(
44+
observed,
45+
imply_ram,
46+
SemLoss(ml; loss_weights = [nsamples(model_ml)]),
47+
optimizer_obj,
48+
)
4549

4650
############################################################################################
4751
### test gradients

0 commit comments

Comments (0)