Unverified commit b5381eeb authored by René Fritze
Browse files

[test/demos] fix neural_networks_instationary not skipping on missing fenics

parent 59cd158b
......@@ -9,7 +9,6 @@ from typer import Argument, Option, run
from pymor.basic import *
from pymor.core.config import config
from pymor.core.exceptions import TorchMissing
from pymor.reductors.neural_network import (NeuralNetworkInstationaryReductor,
NeuralNetworkInstationaryStatefreeOutputReductor)
from pymor.tools import mpi
......@@ -33,8 +32,7 @@ def main(
one-dimensional domain. The discretization is based on pyMOR's built-in
functionality.
"""
if not config.HAVE_TORCH:
raise TorchMissing()
config.require("TORCH")
fom, plot_function = create_fom(problem_number, grid_intervals, time_steps)
......@@ -122,6 +120,7 @@ def main(
def create_fom(problem_number, grid_intervals, time_steps):
print('Discretize ...')
if problem_number == 0:
config.require("FENICS")
fom, plot_function = discretize_navier_stokes(grid_intervals, time_steps)
elif problem_number == 1:
problem = burgers_problem()
......
......@@ -174,7 +174,8 @@ DEMO_ARGS = [(f'pymordemos.{a}', b) for (a, b) in DEMO_ARGS]
def _skip_if_no_solver(param):
demo, args = param
from pymor.core.config import config
for solver, package in [('fenics', None), ('ngsolve', None), ('neural_', 'TORCH')]:
for solver, package in [('fenics', None), ('ngsolve', None), ('neural_', 'TORCH'),
('neural_networks_instationary', 'FENICS')]:
package = package or solver.upper()
needs_solver = len([f for f in args if solver in str(f)]) > 0 or demo.find(solver) >= 0
has_solver = getattr(config, f'HAVE_{package}')
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.