OpenAI

Installation

You need to install `openai>=1.0.0` and `tiktoken` to use these models in blendsql, e.g. `pip install "openai>=1.0.0" tiktoken`.

Environment

To use one of these LLMs as a Blender, create a `.env` file containing the required auth variables (for `OpenaiLLM`, `OPENAI_API_KEY`; for `AzureOpenaiLLM`, either `OPENAI_API_KEY` or all of `TENANT_ID`, `CLIENT_ID`, `CLIENT_SECRET`).
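
For instance, here is a minimal sketch of the two accepted forms of the `env` argument (the paths below are placeholders; `env` defaults to the current directory, `"."`):

```python
from blendsql.models import OpenaiLLM

# Point `env` at the directory containing your .env file...
model = OpenaiLLM("gpt-3.5-turbo", env=".")

# ...or at the dotfile itself.
model = OpenaiLLM("gpt-3.5-turbo", env="path/to/.env")
```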

OpenaiLLM

Bases: RemoteModel

Class for OpenAI Model API.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `model_name_or_path` | `str` | Name of the OpenAI model to use | *required* |
| `env` | `str` | Path to directory of `.env` file, or to the file itself to load as a dotfile. Should contain the variable `OPENAI_API_KEY` | `'.'` |
| `config` | `Optional[dict]` | Optional argument mapping to use in loading model | `None` |
| `caching` | `bool` | Bool determining whether we access the model's cache | `True` |

Examples:

Given the following `.env` file in the directory above current:

```text
OPENAI_API_KEY=my_api_key
```

```python
from blendsql.models import OpenaiLLM

model = OpenaiLLM(
    "gpt-3.5-turbo",
    env="..",
    config={"temperature": 0.7}
)
```
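
Once constructed, the model is passed to blendsql as the Blender that executes the LLM-based ingredients in a query. The sketch below is only an illustration: `blend()`, the `blender=` and `ingredients=` keywords, the `SQLite` connector, and the example query are assumptions not defined on this page and may differ across blendsql versions, so check the main blendsql documentation for the exact call signature.

```python
# Hypothetical usage sketch -- blend, SQLite, LLMMap, blender=, and ingredients=
# are assumed names and may not match your installed blendsql version.
from blendsql import blend, LLMMap
from blendsql.db import SQLite
from blendsql.models import OpenaiLLM

model = OpenaiLLM("gpt-3.5-turbo", env="..")

smoothie = blend(
    query="""
    SELECT title FROM movies
    WHERE {{LLMMap('Is this a sci-fi film?', 'movies::title')}} = TRUE
    """,
    db=SQLite("movies.db"),
    blender=model,        # the OpenaiLLM instance acts as the Blender
    ingredients={LLMMap},
)
```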

Source code in blendsql/models/remote/_openai.py
class OpenaiLLM(RemoteModel):
    """Class for OpenAI Model API.

    Args:
        model_name_or_path: Name of the OpenAI model to use
        env: Path to directory of .env file, or to the file itself to load as a dotfile.
            Should contain the variable `OPENAI_API_KEY`
        config: Optional argument mapping to use in loading model
        caching: Bool determining whether we access the model's cache

    Examples:
        Given the following `.env` file in the directory above current:
        ```text
        OPENAI_API_KEY=my_api_key
        ```
        ```python
        from blendsql.models import OpenaiLLM

        model = OpenaiLLM(
            "gpt-3.5-turbo",
            env="..",
            config={"temperature": 0.7}
        )
        ```
    """

    def __init__(
        self,
        model_name_or_path: str,
        env: str = ".",
        config: Optional[dict] = None,
        caching: bool = True,
        **kwargs,
    ):
        if not _has_openai:
            raise ImportError(
                'Please install openai>=1.0.0 and tiktoken with `pip install "openai>=1.0.0" tiktoken`!'
            ) from None

        import tiktoken

        if config is None:
            config = {}
        super().__init__(
            model_name_or_path=model_name_or_path,
            tokenizer=tiktoken.encoding_for_model(model_name_or_path),
            requires_config=True,
            refresh_interval_min=30,
            load_model_kwargs=config | DEFAULT_CONFIG,
            env=env,
            caching=caching,
            **kwargs,
        )

    def _load_model(self) -> ModelObj:
        from guidance.models import OpenAI

        return OpenAI(
            self.model_name_or_path, echo=False, api_key=os.getenv("OPENAI_API_KEY")
        )

    def _setup(self, **kwargs) -> None:
        openai_setup()
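
`OpenaiLLM` follows a general `RemoteModel` pattern (as does `AzureOpenaiLLM` below): pass model metadata to `RemoteModel.__init__`, then implement `_load_model()` (return the underlying client) and `_setup()` (prepare credentials). The sketch below illustrates that shape for a hypothetical provider; the `RemoteModel` import path and the exact set of required constructor arguments (e.g. whether `tokenizer` is mandatory) are assumptions inferred from the calls above, not a documented extension API.

```python
# Hypothetical sketch of a custom remote model following the pattern above.
# The import path and constructor keywords are assumptions inferred from this page.
from blendsql.models import RemoteModel  # assumed import path


class MyProviderLLM(RemoteModel):
    def __init__(self, model_name_or_path: str, env: str = ".", caching: bool = True, **kwargs):
        super().__init__(
            model_name_or_path=model_name_or_path,
            requires_config=True,      # credentials are expected in a .env file, as above
            refresh_interval_min=30,   # same refresh cadence used by OpenaiLLM
            env=env,
            caching=caching,
            **kwargs,
        )

    def _load_model(self):
        # Return the underlying client object here
        # (OpenaiLLM returns guidance.models.OpenAI).
        raise NotImplementedError

    def _setup(self, **kwargs) -> None:
        # Read or refresh credentials from the environment here.
        pass
```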

AzureOpenaiLLM

Bases: RemoteModel

Class for Azure OpenAI Model API.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `model_name_or_path` | `str` | Name of the Azure deployment to use | *required* |
| `env` | `str` | Path to directory of `.env` file, or to the file itself to load as a dotfile. Should either contain the variable `OPENAI_API_KEY`, or all of `TENANT_ID`, `CLIENT_ID`, `CLIENT_SECRET` | `'.'` |
| `config` | `Optional[dict]` | Optional dict to use in loading model | `None` |
| `caching` | `bool` | Bool determining whether we access the model's cache | `True` |

Examples:

Given the following `.env` file in the directory above current:

```text
TENANT_ID=my_tenant_id
CLIENT_ID=my_client_id
CLIENT_SECRET=my_client_secret
```

```python
from blendsql.models import AzureOpenaiLLM

model = AzureOpenaiLLM(
    "gpt-3.5-turbo",
    env="..",
    config={"temperature": 0.7}
)
```

Source code in blendsql/models/remote/_openai.py
class AzureOpenaiLLM(RemoteModel):
    """Class for Azure OpenAI Model API.

    Args:
        model_name_or_path: Name of the Azure deployment to use
        env: Path to directory of .env file, or to the file itself to load as a dotfile.
            Should either contain the variable `OPENAI_API_KEY`,
            or all of `TENANT_ID`, `CLIENT_ID`, `CLIENT_SECRET`
        config: Optional dict to use in loading model
        caching: Bool determining whether we access the model's cache

    Examples:
        Given the following `.env` file in the directory above current:
        ```text
        TENANT_ID=my_tenant_id
        CLIENT_ID=my_client_id
        CLIENT_SECRET=my_client_secret
        ```
        ```python
        from blendsql.models import AzureOpenaiLLM

        model = AzureOpenaiLLM(
            "gpt-3.5-turbo",
            env="..",
            config={"temperature": 0.7}
        )
        ```
    """

    def __init__(
        self,
        model_name_or_path: str,
        env: str = ".",
        config: Optional[dict] = None,
        caching: bool = True,
        **kwargs,
    ):
        if not _has_openai:
            raise ImportError(
                "Please install openai>=1.0.0 with `pip install openai>=1.0.0`!"
            ) from None

        import tiktoken

        if config is None:
            config = {}
        super().__init__(
            model_name_or_path=model_name_or_path,
            tokenizer=tiktoken.encoding_for_model(model_name_or_path),
            requires_config=True,
            refresh_interval_min=30,
            load_model_kwargs=config | DEFAULT_CONFIG,
            env=env,
            caching=caching,
            **kwargs,
        )

    def _load_model(self) -> ModelObj:
        from guidance.models import AzureGuidance

        return AzureGuidance(
            self.model_name_or_path,
            azure_endpoint=os.getenv("OPENAI_API_BASE"),
            api_key=os.getenv("OPENAI_API_KEY"),
        )  # type: ignore

    def _setup(self, **kwargs) -> None:
        openai_setup()
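
Note that `_load_model` above reads `OPENAI_API_BASE` and `OPENAI_API_KEY` from the environment; how those get populated from `TENANT_ID`/`CLIENT_ID`/`CLIENT_SECRET` is handled by `openai_setup()`, which is not shown on this page. As a minimal sketch, you could sanity-check that the variables `_load_model` expects are present once your `.env` has been loaded (the variable names come directly from the `os.getenv` calls above):

```python
import os

# Warn if the variables AzureOpenaiLLM._load_model() reads are missing.
for var in ("OPENAI_API_BASE", "OPENAI_API_KEY"):
    if not os.getenv(var):
        print(f"Warning: {var} is not set; AzureOpenaiLLM._load_model() expects it.")
```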