@@ -19,8 +19,9 @@ using RuntimeGeneratedFunctions
 using Statistics
 using ArrayInterface
 import Optim
-using Symbolics: wrap, unwrap, arguments, operation
-using SymbolicUtils
+using Symbolics: wrap, unwrap, arguments, operation, symtype, @arrayop, Arr
+using SymbolicUtils.Code
+using SymbolicUtils: Prewalk, Postwalk, Chain
 using AdvancedHMC, LogDensityProblems, LinearAlgebra, Functors, MCMCChains
 using MonteCarloMeasurements: Particles
 using ModelingToolkit: value, nameof, toexpr, build_expr, expand_derivatives, Interval,
@@ -32,7 +33,9 @@ using SciMLBase: @add_kwonly, parameterless_type
 using UnPack: @unpack
 import ChainRulesCore, Lux, ComponentArrays
 using Lux: FromFluxAdaptor, recursive_eltype
-using ChainRulesCore: @non_differentiable
+using ChainRulesCore: @non_differentiable, @ignore_derivatives
+using PDEBase: AbstractVarEqMapping, VariableMap, cardinalize_eqs!, get_depvars,
+               get_indvars, differential_order

 RuntimeGeneratedFunctions.init(@__MODULE__)

@@ -41,13 +44,15 @@ abstract type AbstractPINN end
 abstract type AbstractTrainingStrategy end

 include("pinn_types.jl")
+include("eq_data.jl")
 include("symbolic_utilities.jl")
 include("training_strategies.jl")
 include("adaptive_losses.jl")
 include("ode_solve.jl")
 # include("rode_solve.jl")
 include("dae_solve.jl")
 include("transform_inf_integral.jl")
+include("loss_function_generation.jl")
 include("discretize.jl")
 include("neural_adapter.jl")
 include("advancedHMC_MCMC.jl")