Skip to content

Commit

Permalink
Merge pull request #44 from atomiechen/ft_import
Browse files Browse the repository at this point in the history
Feature: improve imports for better IDE support
  • Loading branch information
atomiechen authored Aug 6, 2024
2 parents c62c088 + 79dee65 commit 456427f
Show file tree
Hide file tree
Showing 5 changed files with 51 additions and 13 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
.DS_Store

dist
build
*.egg-info

# credentials
Expand Down
6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,6 @@ addopts = [
]

[tool.ruff.lint.per-file-ignores]
# ref: https://github.com/astral-sh/ruff/issues/2407#issuecomment-1974783543
# Ignore unused imports and import * in __init__.py files
"__init__.py" = ["F401", "F403"]
# # ref: https://github.com/astral-sh/ruff/issues/2407#issuecomment-1974783543
# # Ignore unused imports and import * in __init__.py files
# "__init__.py" = ["F401", "F403"]
48 changes: 38 additions & 10 deletions src/handyllm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,38 @@
from .openai_client import *
from .requestor import *
from .openai_api import *
from .endpoint_manager import *
from .prompt_converter import *
from .utils import *
from .types import *
from .hprompt import *
from .cache_manager import *
from .response import *
from .openai_client import OpenAIClient as OpenAIClient, ClientMode as ClientMode
from .requestor import (
Requestor as Requestor,
DictRequestor as DictRequestor,
BinRequestor as BinRequestor,
ChatRequestor as ChatRequestor,
CompletionsRequestor as CompletionsRequestor,
)
from .openai_api import OpenAIAPI as OpenAIAPI
from .endpoint_manager import EndpointManager as EndpointManager, Endpoint as Endpoint
from .prompt_converter import PromptConverter as PromptConverter
from .utils import (
stream_chat_all as stream_chat_all,
stream_chat as stream_chat,
stream_completions as stream_completions,
astream_chat_all as astream_chat_all,
astream_chat as astream_chat,
astream_completions as astream_completions,
stream_to_file as stream_to_file,
astream_to_file as astream_to_file,
VM as VM,
)
from .hprompt import (
HandyPrompt as HandyPrompt,
ChatPrompt as ChatPrompt,
CompletionsPrompt as CompletionsPrompt,
loads as loads,
load as load,
load_from as load_from,
dumps as dumps,
dump as dump,
dump_to as dump_to,
load_var_map as load_var_map,
RunConfig as RunConfig,
RecordRequestMode as RecordRequestMode,
CredentialType as CredentialType,
)
from .cache_manager import CacheManager as CacheManager
Empty file added src/handyllm/py.typed
Empty file.
9 changes: 9 additions & 0 deletions src/handyllm/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,15 @@
"CompletionsResponse",
"CompletionsChunkChoice",
"CompletionsChunk",
"Function",
"ToolCall",
"TopLogProbItem",
"LogProbItem",
"Logprobs",
"Usage",
"ToolCallDelta",
"ChatChunkDelta",
"CompletionLogprobs",
]

from typing import List, MutableMapping, Optional, Sequence, TypedDict
Expand Down

0 comments on commit 456427f

Please sign in to comment.