Commit 85a417d
Merge pull request #271 from conacts/issue-260-cuda-error
Issue 260: fixing cuda not available
PromtEngineer authored Jul 26, 2023
2 parents 511afd1 + 36b50f3
Showing 2 changed files with 3 additions and 2 deletions.
ingest.py (2 additions, 1 deletion)

@@ -3,6 +3,7 @@
 from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, as_completed

 import click
+import torch
 from langchain.docstore.document import Document
 from langchain.embeddings import HuggingFaceInstructEmbeddings
 from langchain.text_splitter import Language, RecursiveCharacterTextSplitter
@@ -89,7 +90,7 @@ def split_documents(documents: list[Document]) -> tuple[list[Document], list[Doc
 @click.command()
 @click.option(
     "--device_type",
-    default="cuda",
+    default="cuda" if torch.cuda.is_available() else "cpu",
     type=click.Choice(
         [
             "cpu",
run_localGPT.py (1 addition, 1 deletion)

@@ -131,7 +131,7 @@ def load_model(device_type, model_id, model_basename=None):
 @click.command()
 @click.option(
     "--device_type",
-    default="cuda",
+    default="cuda" if torch.cuda.is_available() else "cpu",
     type=click.Choice(
         [
             "cpu",
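
For context, here is a minimal standalone sketch of the pattern this commit applies in both scripts: the click option's default is computed at import time with torch.cuda.is_available(), so machines without a CUDA device fall back to "cpu" instead of failing with a CUDA error. The script below is illustrative only; the option list is abbreviated and the command body is a stand-in, not the actual localGPT code.

# Illustrative sketch of the commit's device-selection pattern (not the
# actual localGPT CLI; the choices and command body are trimmed down).
import click
import torch


@click.command()
@click.option(
    "--device_type",
    # Evaluated once at import time: prefer CUDA only when torch can see a GPU.
    default="cuda" if torch.cuda.is_available() else "cpu",
    type=click.Choice(["cpu", "cuda"]),
    help="Device to run on; defaults to cuda only when a GPU is available.",
)
def main(device_type):
    # Downstream code can rely on device_type being a valid torch device string.
    click.echo(f"Selected device: {device_type}")


if __name__ == "__main__":
    main()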
