Setting up MPC architecture #160

Open
wants to merge 7 commits into main
Changes from 5 commits
@@ -6,7 +6,7 @@ FROM ${BASE_IMAGE} as source
WORKDIR ${AMENT_WS}/src

# Copy in source code
COPY src/action/model_predictive_control model_predictive_control
COPY src/action/local_planning local_planning
COPY src/wato_msgs/sample_msgs sample_msgs

# Scan for rosdeps
modules/dev_overrides/docker-compose.action.yaml (3 changes: 1 addition & 2 deletions)
@@ -36,11 +36,10 @@ services:
- ${MONO_DIR}/src/action/local_planning:/home/ament_ws/src/local_planning

model_predictive_control:
<<: *fixuid
extends:
file: ../docker-compose.action.yaml
service: model_predictive_control
image: "${ACTION_MPC_IMAGE}:build_${TAG}"
command: tail -F anything
volumes:
- ${MONO_DIR}/src/action/model_predictive_control:/home/ament_ws/src/model_predictive_control
- ${MONO_DIR}/src/action/model_predictive_control:/home/bolty/model_predictive_control
modules/docker-compose.action.yaml (1 change: 0 additions & 1 deletion)
@@ -41,6 +41,5 @@ services:
cache_from:
- "${ACTION_MPC_IMAGE}:build_${TAG}"
- "${ACTION_MPC_IMAGE}:build_main"
target: deploy
image: "${ACTION_MPC_IMAGE}:${TAG}"
command: /bin/bash -c "ros2 launch model_predictive_control model_predictive_control.launch.py"
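Note: the deploy command above runs ros2 launch model_predictive_control model_predictive_control.launch.py, but the launch file itself is not part of this diff. As a rough sketch only, a minimal ROS 2 launch file for it could look like the following; the executable name model_predictive_control_node is an assumption, not something defined in this PR.

# Hypothetical sketch of model_predictive_control.launch.py; the executable name is assumed.
from launch import LaunchDescription
from launch_ros.actions import Node


def generate_launch_description():
    return LaunchDescription([
        Node(
            package='model_predictive_control',
            executable='model_predictive_control_node',  # assumed executable name
            name='model_predictive_control',
            output='screen',
        ),
    ])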
modules/docker-compose.simulation.yaml (11 changes: 8 additions & 3 deletions)
@@ -5,11 +5,16 @@ services:
image: carlasim/carla:0.9.13
environment:
- DISPLAY=1
- CUDA_VISIBLE_DEVICES=0,1,2
- NVIDIA_VISIBLE_DEVICES=0,1,2
runtime: nvidia
- CUDA_VISIBLE_DEVICES=0
- NVIDIA_VISIBLE_DEVICES=0
restart: always
command: /bin/bash -c "./CarlaUE4.sh -nosound -carla-server -RenderOffscreen -world-port=2000 -quality-level=Low"
deploy:
resources:
reservations:
devices:
- driver: nvidia
capabilities: [gpu]

carla_ros_bridge:
build:
Empty file.
Empty file.
@@ -0,0 +1,70 @@
import numpy as np
import torch


class BoxConstraint:
    """
    Bounded constraints lb <= x <= ub expressed as the polytopic constraints -Ix <= -lb and Ix <= ub.
    np.vstack((-I, I)) forms the H matrix from III-D-b of the paper.
    """
    def __init__(self, lb=None, ub=None, plot_idxs=None):
        """
        :param lb: dimension-wise list of lower bounds.
        :param ub: dimension-wise list of upper bounds.
        :param plot_idxs: When plotting, the box itself might be defined in a dimension greater than 2, but we might only want to
        plot the workspace variables, so plot_idxs limits the consideration of plot_constraint_set to those variables.
        """
        self.lb = np.array(lb, ndmin=2).reshape(-1, 1)
        self.ub = np.array(ub, ndmin=2).reshape(-1, 1)
        self.plot_idxs = plot_idxs
        self.dim = self.lb.shape[0]
        assert (self.lb < self.ub).all(), "Lower bounds must be less than the corresponding upper bounds for every dimension"
        self.setup_constraint_matrix()

    def __str__(self): return "Lower bound: %s, Upper bound: %s" % (self.lb, self.ub)

    def get_random_vectors(self, num_samples):
        # Draw uniform samples in [0, 1) and rescale them dimension-wise into [lb, ub].
        rand_samples = np.random.rand(self.dim, num_samples)
        for i in range(self.dim):
            scale_factor, shift_factor = (self.ub[i] - self.lb[i]), self.lb[i]
            rand_samples[i, :] = (rand_samples[i, :] * scale_factor) + shift_factor
        return rand_samples

    def setup_constraint_matrix(self):
        dim = self.lb.shape[0]
        # CasADi can't do matrix multiplication with Torch tensors, only NumPy arrays, so the NumPy versions
        # of the H matrix and b vector are used when defining constraints in the opti stack.
        self.H_np = np.vstack((-np.eye(dim), np.eye(dim)))
        self.H = torch.Tensor(self.H_np)
        self.b_np = np.vstack((-self.lb, self.ub))
        self.b = torch.Tensor(self.b_np)
        # Evaluates Hx - b for a sample x; every entry is <= 0 exactly when x lies inside the box.
        self.sym_func = lambda x: self.H_np @ np.array(x, ndmin=2).reshape(-1, 1) - self.b_np

    def check_satisfaction(self, sample):
        # Return True if the sample is within the polytope defined by the constraints, else False.
        return (self.sym_func(sample) <= 0).all()

    def generate_uniform_samples(self, num_samples):
        # Number of grid points per dimension so that the full grid has roughly num_samples points.
        n = int(np.round(num_samples**(1. / self.lb.shape[0])))

        # Generate a 1D array of n equally spaced values between the lower and upper bounds for each dimension
        coords = []
        for i in range(self.lb.shape[0]):
            coords.append(np.linspace(self.lb[i, 0], self.ub[i, 0], n))

        # Create a meshgrid of all possible combinations of the n dimensions
        meshes = np.meshgrid(*coords, indexing='ij')

        # Flatten the meshgrid and stack the coordinates into an array of shape (n-dimensions, number of grid points)
        samples = np.vstack([m.flatten() for m in meshes])

        # Truncate to at most num_samples samples and return the resulting array
        return samples[:, :num_samples]

    def clip_to_bounds(self, samples):
        return np.clip(samples, self.lb, self.ub)
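
For reference, a minimal usage sketch of this class (the bounds and test points below are made-up illustrative values, not anything defined in this PR): for lb = [-1, -2] and ub = [1, 2], H is the 4x2 stack of -I and I and b = [1, 2, 1, 2]^T, so Hx <= b reproduces lb <= x <= ub.

# Illustrative usage only; bounds and test points are arbitrary example values.
box = BoxConstraint(lb=[-1.0, -2.0], ub=[1.0, 2.0])
print(box.H_np.shape, box.b_np.shape)       # (4, 2) (4, 1)
print(box.check_satisfaction([0.5, 1.0]))   # True: inside the box
print(box.check_satisfaction([1.5, 0.0]))   # False: violates the upper bound in the first dimension
print(box.get_random_vectors(5).shape)      # (2, 5): five random points inside the box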