Can anyone explain why the Dirichlet boundary conditions don't take effect in my DG simulation of the linear scalar wave equation? How can I fix this? Thanks in advance.
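For reference, the continuous problem behind the code below is the one-way (advection) form of the linear scalar wave equation,

$$u_t + D\,u_x = 0 \quad \text{on } (0, 2\pi) \times (0, \pi],$$

with speed $D = 2\pi$, discretised with DG(1) elements in space and a Crank-Nicolson (midpoint) rule in time; the Dirichlet data ubdr is meant to prescribe the inflow value at $x = 0$.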
My DG code:
from dolfin import *
# Constants
D = 2.0*DOLFIN_PI
lmbda = 0.5*D
l = D/lmbda
omega = -D*D/lmbda
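# (D serves both as the length of the interval and as the advection speed below;
#  lmbda, l and omega are the wavelength, wave number and angular frequency
#  appearing in the boundary data.)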
# Prepare a mesh
mesh = IntervalMesh(50, 0.0, D)
# Define function spaces
DGorder = 1
V = FunctionSpace(mesh, "DG", DGorder)
U = FunctionSpace(mesh, "DG", DGorder)
# Set some expressions
uinit = Expression("0")
ubdr = Expression("sin(l*x[0]-omega*t)", l = l, omega = omega, t = 0.0)
# Set boundary conditions
Port = CompiledSubDomain("near(x[0], 0.0)")
bc = DirichletBC(U, ubdr, Port)
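# (Port is the left end x = 0, i.e. the inflow boundary for positive speed D;
#  bc is meant to impose ubdr there strongly via DirichletBC.)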
# Set initial values
u0 = interpolate(uinit, U)
# Define test and trial functions
v = TestFunction(V)
u = TrialFunction(U)
# Set time stepping parameters
t = 0
dt = 0.001
T = DOLFIN_PI
# Define normals
n = FacetNormal(mesh)
# Set penalty parameter
alpha = 1.
# Midpoint (Crank-Nicolson) time discretisation: udot approximates du/dt,
# uold is the midpoint value used in the spatial terms
udot = (u - u0)/dt
uold = (u + u0)*0.5
# Define fluxes on interior and exterior facets
uhat = avg(D*uold) + D*(1.-alpha)*0.5*jump(uold)
uhatbnd = D*uold + D*(1.-alpha)*0.5*uold
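# (uhat acts on interior facets, uhatbnd on the exterior/boundary facets; with
#  alpha = 1 the jump/penalty terms vanish, so uhat reduces to avg(D*uold) and
#  uhatbnd to the interior trace D*uold.)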
# Define variational formulation
F = (udot*v - D*uold*v.dx(0))*dx \
    + uhat*jump(v)*dS \
    + uhatbnd*v*ds
a, L = lhs(F), rhs(F)
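# (Multiplying u_t + D*u_x = 0 by v, integrating by parts elementwise and
#  replacing the facet values of D*u by the fluxes above gives F; lhs/rhs then
#  split out the unknown and known parts of the Crank-Nicolson step.)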
# Prepare solution
u = Function(U)
plot(u0)
# The actual timestepping
while t < T:
    ubdr.t = t
    solve(a == L, u, bcs = bc)
    u0.assign(u)
    t += dt
plot(u0)