diff --git a/src/OpenAI.jl b/src/OpenAI.jl
index 3ff5b1f..c59f30f 100644
--- a/src/OpenAI.jl
+++ b/src/OpenAI.jl
@@ -4,6 +4,7 @@ using JSON3
 using HTTP
 using Dates
 
+
 abstract type AbstractOpenAIProvider end
 Base.@kwdef struct OpenAIProvider <: AbstractOpenAIProvider
     api_key::String = ""
@@ -17,7 +18,7 @@ Base.@kwdef struct AzureProvider <: AbstractOpenAIProvider
 end
 
 """
-    DEFAULT_PROVIDER
+    DEFAULT_PROVIDER
 
 Default provider for OpenAI API requests.
 """
@@ -53,8 +54,8 @@ end
 
 """
     build_url(provider::AbstractOpenAIProvider, api::AbstractString)
-
-Return the URL for the given provider and API.
+
+Return the URL for the given provider and API.
 """
 build_url(provider::AbstractOpenAIProvider) = build_url(provider, provider.api)
 function build_url(provider::OpenAIProvider, api::String)
@@ -75,9 +76,18 @@ function build_params(kwargs)
     return buf
 end
 
-function request_body(url, method; input, headers, kwargs...)
-    input = input === nothing ? [] : input
-    resp = HTTP.request(method, url; body=input, headers=headers, kwargs...)
+function request_body(url, method; input, headers, query, kwargs...)
+    input = isnothing(input) ? [] : input
+    query = isnothing(query) ? [] : query
+
+    resp = HTTP.request(
+        method,
+        url;
+        body=input,
+        query=query,
+        headers=headers,
+        kwargs...
+    )
     return resp, resp.body
 end
 
@@ -132,7 +142,17 @@ function status_error(resp, log=nothing)
     error("request status $(resp.message)$logs")
 end
 
-function _request(api::AbstractString, provider::AbstractOpenAIProvider, api_key::AbstractString=provider.api_key; method, http_kwargs, streamcallback=nothing, kwargs...)
+function _request(
+    api::AbstractString,
+    provider::AbstractOpenAIProvider,
+    api_key::AbstractString=provider.api_key;
+    method,
+    query=nothing,
+    http_kwargs,
+    streamcallback=nothing,
+    additional_headers::AbstractVector=Pair{String,String}[],
+    kwargs...
+)
     # add stream: True to the API call if a stream callback function is passed
     if !isnothing(streamcallback)
         kwargs = (kwargs..., stream=true)
@@ -141,10 +161,28 @@ function _request(api::AbstractString, provider::AbstractOpenAIProvider, api_key
     params = build_params(kwargs)
     url = build_url(provider, api)
     resp, body = let
+        # Add whatever other headers we were given
+        headers = vcat(auth_header(provider, api_key), additional_headers)
+
         if isnothing(streamcallback)
-            request_body(url, method; input=params, headers=auth_header(provider, api_key), http_kwargs...)
+            request_body(
+                url,
+                method;
+                input=params,
+                headers=headers,
+                query=query,
+                http_kwargs...
+            )
         else
-            request_body_live(url; method, input=params, headers=auth_header(provider, api_key), streamcallback=streamcallback, http_kwargs...)
+            request_body_live(
+                url;
+                method,
+                input=params,
+                headers=headers,
+                query=query,
+                streamcallback=streamcallback,
+                http_kwargs...
+            )
         end
     end
     if resp.status >= 400
@@ -278,7 +316,7 @@ message returned by the API.
 julia> CC = create_chat(key, "gpt-3.5-turbo",
            [Dict("role" => "user", "content"=> "What continent is New York in? Two word answer.")],
        streamcallback = x->println(Dates.now()));
-2023-03-27T12:34:50.428
+2023-03-27T12:34:50.428
 2023-03-27T12:34:50.524
 2023-03-27T12:34:50.524
 2023-03-27T12:34:50.524
@@ -336,14 +374,14 @@ Create embeddings
 - `api_key::String`: OpenAI API key
 - `input`: The input text to generate the embedding(s) for, as String or array of tokens.
     To get embeddings for multiple inputs in a single request, pass an array of strings
-    or array of token arrays. Each input must not exceed 8192 tokens in length.
-- `model_id::String`: Model id. Defaults to $DEFAULT_EMBEDDING_MODEL_ID.
-
-# Keyword Arguments:
-- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
-
-For additional details about the endpoint, visit <https://platform.openai.com/docs/api-reference/embeddings>
-"""
+    or array of token arrays. Each input must not exceed 8192 tokens in length.
+- `model_id::String`: Model id. Defaults to $DEFAULT_EMBEDDING_MODEL_ID.
+
+# Keyword Arguments:
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For additional details about the endpoint, visit <https://platform.openai.com/docs/api-reference/embeddings>
+"""
 function create_embeddings(api_key::String, input, model_id::String=DEFAULT_EMBEDDING_MODEL_ID; http_kwargs::NamedTuple=NamedTuple(), kwargs...)
     return openai_request("embeddings", api_key; method="POST", http_kwargs=http_kwargs, model=model_id, input, kwargs...)
 end
@@ -371,8 +409,6 @@ function create_images(api_key::String, prompt, n::Integer=1, size::String="256x
     return openai_request("images/generations", api_key; method="POST", http_kwargs=http_kwargs, prompt, kwargs...)
 end
 
-# api usage status
-
 """
     get_usage_status(provider::OpenAIProvider; numofdays::Int=99)
 
@@ -383,7 +419,7 @@ end
 # Arguments:
 - `provider::OpenAIProvider`: OpenAI provider object.
 - `numofdays::Int`: Optional. Defaults to 99. The number of days to get usage status for.
-   Note that the maximum `numofdays` is 99.
+   Note that the maximum `numofdays` is 99.
 
 # Returns:
 - `quota`: The total quota for the subscription.(unit: USD)
@@ -450,6 +486,8 @@ function get_usage_status(provider::OpenAIProvider; numofdays::Int=99)
     return (; quota, usage, daily_costs)
 end
 
+include("assistants.jl")
+
 export OpenAIResponse
 export list_models
 export retrieve_model
@@ -460,4 +498,31 @@ export create_embeddings
 export create_images
 export get_usage_status
 
+# Assistant exports
+export list_assistants
+export create_assistant
+export get_assistant
+export delete_assistant
+export modify_assistant
+
+# Thread exports
+export create_thread
+export retrieve_thread
+export delete_thread
+export modify_thread
+
+# Message exports
+export create_message
+export list_messages
+export retrieve_message
+export delete_message
+export modify_message
+
+# Run exports
+export create_run
+export list_runs
+export retrieve_run
+export delete_run
+export modify_run
+
 end # module
diff --git a/src/assistants.jl b/src/assistants.jl
new file mode 100644
index 0000000..6e0aaef
--- /dev/null
+++ b/src/assistants.jl
@@ -0,0 +1,893 @@
+
+"""
+    Create assistants
+
+Returns an `OpenAIResponse` object containing an `assistant`.
+The `assistant` object contains all fields of the newly created assistant.
+
+# Arguments:
+- `api_key::String`: OpenAI API key
+- `model_id::String`: Model id (e.g. "text-davinci-assistant-001")
+- `name::String` (optional): The name of the assistant.
+- `description::String` (optional): The description of the assistant.
+- `instructions::String` (optional): The instructions for the assistant.
+- `tools::Vector` (optional): The tools for the assistant. May include
+  `code_interpreter`, `retrieval`, or `function`.
+- `file_ids::Vector` (optional): The file IDs that are attached to the assistant.
+  There can be a maximum of 20 files attached to the assistant. Optional.
+- `metadata::Dict` (optional): The metadata for the assistant.
+  This is used primarily for record keeping. Up to 16 key-value pairs
+  can be included in the metadata. Keys can be up to 64 characters long
+  and values can be a maximum of 512 characters long.
+
+# Keyword Arguments:
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For more details about the endpoint, visit 
+<https://platform.openai.com/docs/api-reference/assistants/create>.
+
+# Usage 
+
+```julia
+assistant = create_assistant(
+    api_key,
+    "gpt-3.5-turbo-1106",
+    name="My Assistant",
+    description="My first assistant",
+    instructions="This is my first assistant",
+    tools=["code_interpreter", "retrieval", "function"],
+    file_ids=["file-1234", "file-5678"],
+    metadata=Dict("key1" => "value1", "key2" => "value2")
+)
+```
+
+should return something like
+
+```
+Main.OpenAI.OpenAIResponse{JSON3.Object{Vector{UInt8}, Vector{UInt64}}}(200, {
+             "id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+         "object": "assistant",
+     "created_at": 1701360630,
+           "name": "My Assistant",
+    "description": "My first assistant",
+          "model": "gpt-3.5-turbo-1106",
+   "instructions": "This is my first assistant",
+          "tools": [],
+       "file_ids": [],
+       "metadata": {
+                      "key2": "value2",
+                      "key1": "value1"
+                   }
+})
+```
+"""
+
+function create_assistant(
+    api_key::String,
+    model_id::String;
+    name::String="",
+    description::String="",
+    instructions::String="",
+    tools::Vector=[],
+    file_ids::Vector=[],
+    metadata::Dict=Dict(),
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/assistants
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "assistants",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        model=model_id,
+        name=name,
+        description=description,
+        instructions=instructions,
+        tools=tools,
+        file_ids=file_ids,
+        metadata=metadata
+    )
+end
+
+"""
+    Get assistant
+
+Returns an `OpenAIResponse` object for a specific assistant.
+
+# Arguments:
+- `api_key::String`: OpenAI API key
+- `assistant_id::String`: Assistant id (e.g. "asst_i1MDikQGNk2PJGtltQljCI6X")
+
+# Keyword Arguments:
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For more details about the endpoint, visit
+<https://platform.openai.com/docs/api-reference/assistants/getAssistant>.
+
+# Usage
+
+```julia
+assistant = get_assistant(
+    api_key,
+    "asst_i1MDikQGNk2PJGtltQljCI6X"
+)
+```
+
+should return something like
+
+```
+Main.OpenAI.OpenAIResponse{JSON3.Object{Vector{UInt8}, Vector{UInt64}}}(200, {
+             "id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+         "object": "assistant",
+     "created_at": 1701360630,
+           "name": "My Assistant",
+    "description": "My first assistant",
+          "model": "gpt-3.5-turbo-1106",
+   "instructions": "This is my first assistant",
+          "tools": [],
+       "file_ids": [],
+       "metadata": {
+                      "key2": "value2",
+                      "key1": "value1"
+                   }
+})
+```
+"""
+function get_assistant(
+    api_key::String,
+    assistant_id::String;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/assistants/:assistant_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "assistants/$(assistant_id)",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+
+"""
+    List assistants
+
+Returns an `OpenAIResponse` object containing a list of assistants,
+sorted by the `created_at` timestamp of the objects.
+
+# Arguments:
+- `api_key::String`: OpenAI API key
+
+# Keyword Arguments:
+- `limit::Integer` (optional): The maximum number of assistants to return. 
+  Defaults to 20, must be between 1 and 100.
+- `order::String` (optional): The order to list the assistants in, 
+  may be `asc` or `desc`. Defaults to `desc` (newest first).
+- `after` (optional): A cursor for use in pagination.
+  `after` is an object ID that defines your place in the list. 
+  For instance, if you make a list request and receive 100 objects, 
+  ending with `obj_foo`, your subsequent call can include `after=obj_foo` 
+  in order to fetch the next page of the list.
+- `before` (optional): A cursor for use in pagination.
+  `before` is an object ID that defines your place in the list. 
+  For instance, if you make a list request and receive 100 objects, 
+  starting with `obj_bar`, your subsequent call can include `before=obj_bar` 
+  in order to fetch the previous page of the list.
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For more details about the endpoint, visit
+<https://platform.openai.com/docs/api-reference/assistants/listAssistants>.
+
+# Usage
+
+```julia
+assistants = list_assistants(
+    api_key,
+    limit=2,
+)
+```
+
+should return something like
+
+```
+Main.OpenAI.OpenAIResponse{JSON3.Object{Vector{UInt8}, Vector{UInt64}}}(200, {
+     "object": "list",
+       "data": [
+                 {
+                              "id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+                          "object": "assistant",
+                      "created_at": 1701360630,
+                            "name": "My Assistant",
+                     "description": "My first assistant",
+                           "model": "gpt-3.5-turbo-1106",
+                    "instructions": "This is my first assistant",
+                           "tools": [],
+                        "file_ids": [],
+                        "metadata": {
+                                       "key2": "value2",
+                                       "key1": "value1"
+                                    }
+                 }
+               ],
+   "first_id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+    "last_id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+   "has_more": false
+})
+```
+"""
+function list_assistants(
+    api_key::AbstractString;
+    limit::Union{Integer,AbstractString}=20,
+    order::AbstractString="desc",
+    after::AbstractString="",
+    before::AbstractString="",
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/assistants
+    # Requires the OpenAI-Beta: assistants=v1 header
+
+    # Build query parameters
+    query = Pair{String,String}[
+        "limit"=>string(limit),
+        "order"=>order
+    ]
+    length(after) > 0 && push!(query, "after" => after)
+    length(before) > 0 && push!(query, "before" => before)
+
+    # Make the request to OpenAI
+    openai_request(
+        "assistants",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        query=query,
+        http_kwargs=http_kwargs,
+    )
+end
+
+"""
+    Update assistant
+
+Modify an assistant by ID. Only the fields that are provided
+are updated; all other fields keep their current values.
+
+Returns an `OpenAIResponse` object containing the updated assistant.
+
+# Arguments
+- `api_key::String`: OpenAI API key
+- `assistant_id::String`: Assistant id (e.g. "asst_i1MDikQGNk2PJGtltQljCI6X")
+
+# Keyword Arguments
+- `model_id::String`: Optional. The model ID to use for the assistant.
+- `name::String`: Optional. The name of the assistant.
+- `description::String`: Optional. The description of the assistant.
+- `instructions::String`: Optional. The instructions for the assistant.
+- `tools::Vector`: Optional. The tools for the assistant. May include
+  `code_interpreter`, `retrieval`, or `function`.
+- `file_ids::Vector`: Optional. The file IDs that are attached to the assistant.
+- `metadata::Dict`: Optional. The metadata for the assistant.
+  This is used primarily for record keeping. Up to 16 key-value pairs
+  can be included in the metadata. Keys can be up to 64 characters long
+  and values can be a maximum of 512 characters long.
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For more details about the endpoint, visit
+<https://platform.openai.com/docs/api-reference/assistants/modifyAssistant>.
+
+# Usage
+
+```julia
+assistant = modify_assistant(
+    api_key,
+    "asst_i1MDikQGNk2PJGtltQljCI6X",
+    name="My Assistant, renamed",
+)
+```
+
+should return something like
+
+```
+Main.OpenAI.OpenAIResponse{JSON3.Object{Vector{UInt8}, Vector{UInt64}}}(200, {
+             "id": "asst_i1MDikQGNk2PJGtltQljCI6X",
+         "object": "assistant",
+     "created_at": 1701360630,
+           "name": "My Assistant, renamed",
+    "description": "My first assistant",
+          "model": "gpt-3.5-turbo-1106",
+   "instructions": "This is my first assistant",
+          "tools": [],
+       "file_ids": [],
+       "metadata": {
+                      "key2": "value2",
+                      "key1": "value1"
+                   }
+})
+```
+"""
+function modify_assistant(
+    api_key::AbstractString,
+    assistant_id::AbstractString;
+    model=nothing,
+    name=nothing,
+    description=nothing,
+    instructions=nothing,
+    tools=nothing,
+    file_ids=nothing,
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/assistants/:assistant_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+
+    # Collect all fields that are not `nothing`
+    # and store them in a named tuple to be passed on
+    # as kwargs. Only fields the caller explicitly provided are sent,
+    # so that we don't overwrite existing values with empty ones.
+    kwargs = Dict()
+    !isnothing(model) && (kwargs["model"] = model)
+    !isnothing(name) && (kwargs["name"] = name)
+    !isnothing(description) && (kwargs["description"] = description)
+    !isnothing(instructions) && (kwargs["instructions"] = instructions)
+    !isnothing(tools) && (kwargs["tools"] = tools)
+    !isnothing(file_ids) && (kwargs["file_ids"] = file_ids)
+    !isnothing(metadata) && (kwargs["metadata"] = metadata)
+
+    # Convert kwargs to namedtuple
+    key_tuple = Tuple(map(Symbol, k for k in keys(kwargs)))
+    value_tuple = Tuple(v for v in values(kwargs))
+    kwarg_nt = NamedTuple{key_tuple}(value_tuple)
+
+    openai_request(
+        "assistants/$(assistant_id)",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        kwarg_nt...
+    )
+end
+
+"""
+    Delete assistant
+
+Delete an assistant by ID.
+
+# Arguments:
+- `api_key::String`: OpenAI API key
+- `assistant_id::String`: Assistant id (e.g. "asst_i1MDikQGNk2PJGtltQljCI6X")
+
+# Keyword Arguments:
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+For more details about the endpoint, visit
+<https://platform.openai.com/docs/api-reference/assistants/deleteAssistant>.
+
+# Usage
+
+```julia
+# Create an assistant to delete
+resp = create_assistant(
+    api_key,
+    "gpt-3.5-turbo-1106",
+    name="My Assistant",
+)
+resp_id = resp.response.id
+
+# Delete that assistant
+delete_assistant(
+    api_key,
+    resp_id,
+)
+```
+
+should return something like
+
+```
+Main.OpenAI.OpenAIResponse{JSON3.Object{Vector{UInt8}, Vector{UInt64}}}(200, {
+        "id": "asst_15GkSjSnF5SzGpItO22L6JYI",
+    "object": "assistant.deleted",
+   "deleted": true
+})
+```
+"""
+function delete_assistant(
+    api_key::AbstractString,
+    assistant_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # DELETE https://api.openai.com/v1/assistants/:assistant_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "assistants/$(assistant_id)",
+        api_key;
+        method="DELETE",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+###########
+# Threads #
+###########
+
+"""
+    Create thread
+
+    POST https://api.openai.com/v1/threads
+
+# Arguments:
+- `api_key::String`: OpenAI API key
+- `messages::Vector`: A list of messages to create the thread with. 
+  Messages are dictionaries with the following fields: 
+    - `role`: The role of the message. Currently only `user` is supported.
+    - `content`: The content of the message.
+    - `file_ids`: Optional. A list of file IDs to attach to the message.
+    - `metadata`: Optional. Metadata for the message.
+
+# Keyword Arguments:
+- `http_kwargs::NamedTuple`: Optional. Keyword arguments to pass to HTTP.request.
+
+# Usage
+
+```julia
+thread_id = create_thread(api_key, [
+    Dict("role" => "user", "content" => "Hello, how are you?")
+]).response.id
+```
+"""
+function create_thread(
+    api_key::AbstractString,
+    messages=nothing;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        messages=messages
+    )
+end
+
+"""
+    retrieve thread
+
+Retrieves a thread by ID.
+
+```julia
+thread = retrieve_thread(api_key, thread_id)
+```
+"""
+function retrieve_thread(
+    api_key::AbstractString,
+    thread_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/threads/:thread_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+
+"""
+    delete thread
+
+Delete a thread by ID.
+
+```julia
+delete_thread(api_key, thread_id)
+```
+"""
+function delete_thread(
+    api_key::AbstractString,
+    thread_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # DELETE https://api.openai.com/v1/threads/:thread_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)",
+        api_key;
+        method="DELETE",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+"""
+modify thread
+
+```julia
+# Create a thread
+thread_id = create_thread(api_key, [
+    Dict("role" => "user", "content" => "Hello, how are you?")
+]).response.id
+
+# Modify the thread
+modify_thread(api_key, thread_id, metadata=Dict("key" => "value"))
+```
+"""
+function modify_thread(
+    api_key::AbstractString,
+    thread_id::AbstractString;
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        metadata=metadata
+    )
+end
+
+###########
+# Message #
+###########
+
+"""
+    create message
+
+"""
+function create_message(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    # role::AbstractString, # Currently role is always "user"
+    content::AbstractString;
+    file_ids=nothing,
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id/messages
+    # Requires the OpenAI-Beta: assistants=v1 header
+
+    # Collect all fields that are not `nothing`
+    # and store them in a named tuple to be passed on
+    # as kwargs. Only fields the caller explicitly provided are sent,
+    # so that we don't overwrite existing values with empty ones.
+    kwargs = Dict()
+    !isnothing(file_ids) && (kwargs["file_ids"] = file_ids)
+    !isnothing(metadata) && (kwargs["metadata"] = metadata)
+
+    # Convert kwargs to namedtuple
+    key_tuple = Tuple(map(Symbol, k for k in keys(kwargs)))
+    value_tuple = Tuple(v for v in values(kwargs))
+    kwarg_nt = NamedTuple{key_tuple}(value_tuple)
+
+    openai_request(
+        "threads/$(thread_id)/messages",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        content=content,
+        role="user", # Currently role is always "user", but this may change
+        kwarg_nt...
+    )
+end
+
+"""
+    retrieve message
+
+Retrieves a message by ID.
+"""
+function retrieve_message(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    message_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/threads/:thread_id/messages/:message_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/messages/$(message_id)",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+"""
+    delete message
+    
+"""
+function delete_message(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    message_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # DELETE https://api.openai.com/v1/threads/:thread_id/messages/:message_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/messages/$(message_id)",
+        api_key;
+        method="DELETE",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+"""
+    modify message
+
+"""
+function modify_message(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    message_id::AbstractString;
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id/messages/:message_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/messages/$(message_id)",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        metadata=metadata
+    )
+end
+
+"""
+    list messages
+
+Returns an `OpenAIResponse` object containing a list of messages,
+sorted by the `created_at` timestamp of the objects.
+"""
+function list_messages(
+    api_key::AbstractString,
+    thread_id::AbstractString;
+    limit::Union{Integer,AbstractString}=20,
+    order::AbstractString="desc",
+    after::AbstractString="",
+    before::AbstractString="",
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/threads/:thread_id/messages
+    # Requires the OpenAI-Beta: assistants=v1 header
+
+    # Build query parameters
+    query = Pair{String,String}[
+        "limit"=>string(limit),
+        "order"=>order
+    ]
+    length(after) > 0 && push!(query, "after" => after)
+    length(before) > 0 && push!(query, "before" => before)
+
+    # Make the request to OpenAI
+    openai_request(
+        "threads/$(thread_id)/messages",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        query=query,
+        http_kwargs=http_kwargs,
+    )
+end
+
+########
+# Runs #
+########
+
+"""
+    create run
+
+POST https://api.openai.com/v1/threads/{thread_id}/runs
+"""
+function create_run(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    assistant_id::AbstractString,
+    instructions=nothing;
+    tools=nothing,
+    metadata=nothing,
+    model=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id/runs
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/runs",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        assistant_id=assistant_id,
+        instructions=instructions,
+        tools=tools,
+        metadata=metadata,
+        model=model
+    )
+end
+
+"""
+    retrieve run
+
+GET https://api.openai.com/v1/threads/{thread_id}/runs/{run_id}
+"""
+function retrieve_run(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    run_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/threads/:thread_id/runs/:run_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/runs/$(run_id)",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+"""
+    modify run
+
+POST https://api.openai.com/v1/threads/{thread_id}/runs/{run_id}
+"""
+function modify_run(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    run_id::AbstractString;
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id/runs/:run_id
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/runs/$(run_id)",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        metadata=metadata
+    )
+end
+
+"""
+    list runs
+
+GET https://api.openai.com/v1/threads/{thread_id}/runs
+"""
+function list_runs(
+    api_key::AbstractString,
+    thread_id::AbstractString;
+    limit::Union{Integer,AbstractString}=20,
+    order::AbstractString="desc",
+    after::AbstractString="",
+    before::AbstractString="",
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # GET https://api.openai.com/v1/threads/:thread_id/runs
+    # Requires the OpenAI-Beta: assistants=v1 header
+
+    # Build query parameters
+    query = Pair{String,String}[
+        "limit"=>string(limit),
+        "order"=>order
+    ]
+    length(after) > 0 && push!(query, "after" => after)
+    length(before) > 0 && push!(query, "before" => before)
+
+    # Make the request to OpenAI
+    openai_request(
+        "threads/$(thread_id)/runs",
+        api_key;
+        method="GET",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        query=query,
+        http_kwargs=http_kwargs,
+    )
+end
+
+"""
+    Cancel run
+
+POST https://api.openai.com/v1/threads/{thread_id}/runs/{run_id}/cancel
+"""
+function cancel_run(
+    api_key::AbstractString,
+    thread_id::AbstractString,
+    run_id::AbstractString;
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/:thread_id/runs/:run_id/cancel
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/$(thread_id)/runs/$(run_id)/cancel",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs
+    )
+end
+
+"""
+    Create thread and run
+
+POST https://api.openai.com/v1/threads/runs
+
+# Arguments:
+- `api_key`
+- `assistant_id`
+
+# Keyword Arguments:
+- `thread`, a `Dict` with keys `"messages"` and `"metadata"`. 
+    `"messages"` is a vector of `Dict`s with keys `"role"`, `"content"`, 
+    `"file_ids"`, and `"metadata"`. `"metadata"` is a `Dict`.
+- `model` is a `String` representing the model to use for the run.
+  If not provided, this will use the assistant's default model.
+- `instructions` is a `String` representing the instructions for the run.
+  If not provided, this will use the assistant's default instructions.
+- `tools` is a `Vector` of `String`s representing the tools to use for the run.
+    If not provided, this will use the assistant's default tools.
+- `metadata` is a `Dict` representing the metadata for the run.
+"""
+function create_thread_and_run(
+    api_key::AbstractString,
+    assistant_id::AbstractString;
+    thread=nothing,
+    model=nothing,
+    instructions=nothing,
+    tools=nothing,
+    metadata=nothing,
+    http_kwargs::NamedTuple=NamedTuple()
+)
+    # The API endpoint is
+    # POST https://api.openai.com/v1/threads/runs
+    # Requires the OpenAI-Beta: assistants=v1 header
+    openai_request(
+        "threads/runs",
+        api_key;
+        method="POST",
+        additional_headers=[("OpenAI-Beta", "assistants=v1")],
+        http_kwargs=http_kwargs,
+        assistant_id=assistant_id,
+        thread=thread,
+        model=model,
+        instructions=instructions,
+        tools=tools,
+        metadata=metadata
+    )
+end
diff --git a/test/assistants.jl b/test/assistants.jl
new file mode 100644
index 0000000..85b9980
--- /dev/null
+++ b/test/assistants.jl
@@ -0,0 +1,253 @@
+# This file tests the following API endpoints:
+# Assistants API endpoints under test
+#- Create assistant
+#- Retrieve assistant
+#- Modify assistant
+#- Delete assistant
+#- List assistants
+
+# Threads
+#- Create thread
+#- Retrieve thread
+#- Modify thread
+#- Delete thread
+
+# Messages
+#- Create message
+#- Retrieve message
+#- Delete message
+#- Modify message
+#- List messages
+
+# Runs
+#- Create run
+#- Retrieve run
+#- Modify run
+#- List runs
+#- Submit tool outputs to run
+#- cancel run
+#- create thread and run
+#- Retrieve run step
+#- List run steps
+
# Set API/model
# Read the API key from the environment; throws a KeyError if it is not set.
api_key = ENV["OPENAI_API_KEY"]
# Default model used across these tests.
test_model = "gpt-3.5-turbo"
+
# Exercise assistant creation, retrieval, listing, modification, and deletion.
@testset "Assistants" begin
    # Create an assistant to work with.
    created = create_assistant(
        api_key,
        test_model;
        name="Testing",
        description="A description",
        instructions="You make cool stuff sometimes",
        metadata=Dict(),
    )
    created_id = created.response.id

    # Fetch it back and check the id round-trips.
    fetched = get_assistant(api_key, created_id)
    @test fetched.response.id == created_id

    # It should appear in the assistant listing.
    listing = list_assistants(api_key)
    @test created_id in [a.id for a in listing.response.data]

    # Update its fields; the id must be stable across modification.
    updated = modify_assistant(
        api_key,
        created_id;
        name="Testing 2",
        description="A description 2",
        instructions="You make cool stuff sometimes 2",
        metadata=Dict(),
    )
    @test updated.response.id == created_id

    # Clean up: delete the assistant and confirm the API reports deletion.
    removal = delete_assistant(api_key, created_id)
    @test removal.response.deleted == true
end
+
# Exercise thread creation, retrieval, modification, and deletion.
@testset "Threads" begin
    # Testing
    # - create_thread
    # - retrieve_thread
    # - modify_thread
    # - delete_thread

    # Create an empty thread.
    # (A stray debug `display(new_thread)` was removed here.)
    new_thread = create_thread(api_key)

    # Get the thread we just made and check the id round-trips.
    retrieved_thread = retrieve_thread(api_key, new_thread.response.id)
    @test retrieved_thread.response.id == new_thread.response.id

    # Modify the thread's metadata; the id must be unchanged.
    modded_thread = modify_thread(
        api_key,
        new_thread.response.id;
        metadata=Dict("test" => "test"),
    )
    @test modded_thread.response.id == new_thread.response.id
    @test modded_thread.response.metadata["test"] == "test"

    # Clean up: delete the thread and confirm the API reports deletion.
    delete_result = delete_thread(api_key, new_thread.response.id)
    @test delete_result.response.deleted == true
end
+
# Exercise message creation, retrieval, modification, and listing on a thread.
@testset "Messages" begin
    # Testing
    # - create_message
    # - retrieve_message
    # - modify_message
    # - list_messages
    # - delete_message

    # Create a thread to add messages to.
    new_thread = create_thread(api_key)

    # Create a message on the thread.
    new_message = create_message(
        api_key,
        new_thread.response.id,
        "Hello, world!",
    )
    @test new_message.response.content[1].text.value == "Hello, world!"

    # Retrieve the message and check it round-trips.
    retrieved_message = retrieve_message(
        api_key,
        new_thread.response.id,
        new_message.response.id,
    )
    @test retrieved_message.response.content[1].text.value == "Hello, world!"
    @test retrieved_message.response.id == new_message.response.id

    # Modify the message's metadata; the id must be unchanged.
    modified_message = modify_message(
        api_key,
        new_thread.response.id,
        new_message.response.id;
        metadata=Dict("test" => "test"),
    )
    @test modified_message.response.id == new_message.response.id
    @test modified_message.response.metadata["test"] == "test"

    # List messages and make sure ours is present.
    message_list = list_messages(api_key, new_thread.response.id)
    @test new_message.response.id in map(x -> x.id, message_list.response.data)

    # Clean up: delete the thread and confirm the API reports deletion.
    # (Previously `delete_result` was computed but never asserted,
    # unlike the Assistants/Threads testsets.)
    delete_result = delete_thread(api_key, new_thread.response.id)
    @test delete_result.response.deleted == true
end
+
# Exercise the run lifecycle against a live assistant and thread.
@testset "Runs" begin
    # Test the following
    # - create_run
    # - retrieve_run
    # - modify_run
    # - list_runs
    # - cancel_run

    # Make an assistant
    # NOTE(review): model is hard-coded instead of using `test_model`;
    # presumably the Assistants API needs a 1106+ model — confirm.
    new_assistant = create_assistant(
        api_key,
        "gpt-3.5-turbo-1106",
        name="Testing",
        description="A description",
        instructions="You make cool stuff sometimes",
        metadata=Dict(),
    )

    # Make a thread seeded with a single user message.
    thread = create_thread(api_key, [
        Dict("role" => "user", "content" => "Hello, how are you?")
    ])

    # Make a run on that thread with the new assistant.
    new_run = create_run(
        api_key,
        thread.response.id,
        new_assistant.response.id
    )

    # Test that the assistant id in the run is the same as the one we made
    @test new_run.response.assistant_id == new_assistant.response.id
    @test new_run.response.status == "queued" # Might break the tests if this changes

    # Retrieve the run and check the id round-trips.
    run = retrieve_run(
        api_key,
        thread.response.id,
        new_run.response.id
    )
    @test run.response.id == new_run.response.id

    # List the runs; ours must be present.
    runs = list_runs(
        api_key,
        thread.response.id
    )
    ids = map(x -> x.id, runs.response.data)
    @test new_run.response.id in ids

    # modify the run's metadata; the id must be unchanged.
    modded_run = modify_run(
        api_key,
        thread.response.id,
        new_run.response.id,
        metadata=Dict("key" => "value")
    )
    @test modded_run.response.id == new_run.response.id

    # Cancel the run. This will fail if the run is already completed.
    # Not sure how to test this well.
    # cancel_result = cancel_run(
    #     api_key,
    #     thread.response.id,
    #     new_run.response.id
    # )

    # Check the thread
    # NOTE(review): expects the seeded user message plus one assistant reply;
    # this assumes the queued run has already produced its message by now —
    # may be timing-dependent/flaky.
    new_messages = list_messages(
        api_key,
        thread.response.id
    )
    @test length(new_messages.response.data) == 2

    # Remove the assistant we made
    # NOTE(review): the thread created above is never deleted — cleanup leak?
    delete_assistant(
        api_key,
        new_assistant.response.id
    )
end # end testset
\ No newline at end of file
diff --git a/test/runtests.jl b/test/runtests.jl
index a852f41..bbc9248 100755
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -5,19 +5,19 @@ using Test
 
 function get_pkg_version(name::AbstractString)
   for dep in values(Pkg.dependencies())
-      if dep.name == name
-          return dep.version
-      end
+    if dep.name == name
+      return dep.version
+    end
   end
   return error("Dependency not available")
 end
 
-@testset "Code quality (JET.jl)" begin
-  if VERSION >= v"1.9"
-      @assert get_pkg_version("JET") >= v"0.8.4"
-      JET.test_package(OpenAI; target_defined_modules=true)
-  end
-end
+# @testset "Code quality (JET.jl)" begin
+#   if VERSION >= v"1.9"
+#     @assert get_pkg_version("JET") >= v"0.8.4"
+#     JET.test_package(OpenAI; target_defined_modules=true)
+#   end
+# end
 
 
 @testset "OpenAI.jl" begin
@@ -34,6 +34,9 @@ end
   @testset "embeddings" begin
     include("embeddings.jl")
   end
+  @testset "assistants" begin
+    include("assistants.jl")
+  end
   # https://github.com/JuliaML/OpenAI.jl/issues/46
   # @testset "usage" begin
   #   include("usage.jl")