
Execute a BlendSQL Query

BlendSQL Class

Core BlendSQL class that provides a high-level interface for executing BlendSQL queries.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `db` | `Union[DataFrame, dict, str, Database]` | Database to connect to. Can be a pandas DataFrame or dict of DataFrames, a path to a SQLite database file, a PostgreSQL connection string, or a `Database` object. | `None` |
| `model` | `Optional[Model]` | Model instance to use for LLM operations. Can also be provided during query execution. | `None` |
| `ingredients` | `Optional[Collection[Type[Ingredient]]]` | Collection of ingredients to make available for queries. Can also be provided during query execution. | `list()` |
| `verbose` | `bool` | Whether to output debug logging information. | `False` |
| `infer_gen_constraints` | `bool` | Whether to automatically infer constraints for LLM generation based on query context. | `True` |
| `table_to_title` | `Optional[Dict[str, str]]` | Optional mapping from table names to descriptive titles, useful for datasets where table titles contain metadata. | `None` |
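For reference, a minimal sketch of the different `db` inputs accepted above, based on the type dispatch in the source below; the SQLite path and PostgreSQL connection string are placeholders, and the exact connection-string format expected by the PostgreSQL backend may differ.

```python
import pandas as pd
from blendsql import BlendSQL

# In-memory: a DataFrame (or dict of DataFrames) is loaded into the Pandas backend
bsql_mem = BlendSQL({"People": pd.DataFrame({"Name": ["George Washington", "John Adams"]})})

# SQLite: a string or Path pointing to an existing file is opened as a SQLite database
bsql_sqlite = BlendSQL("path/to/existing.db")  # placeholder path; must exist on disk

# PostgreSQL: any other string is treated as a connection string
bsql_pg = BlendSQL("user:password@localhost:5432/mydb")  # placeholder connection string
```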
Source code in blendsql/blendsql.py
@dataclass
class BlendSQL:
    """Core `BlendSQL` class that provides high level interface for executing BlendSQL queries.

    Args:
        db (Union[pd.DataFrame, dict, str, Database]): Database to connect to. Can be:

            - pandas DataFrame or dict of DataFrames

            - Path to SQLite database file

            - PostgreSQL connection string

            - `Database` object
        model (Optional[Model]): Model instance to use for LLM operations. Can also be
            provided during query execution.
        ingredients (Optional[Collection[Type[Ingredient]]]): Collection of ingredients to
            make available for queries. Can also be provided during query execution.
        verbose (bool): Whether to output debug logging information. Defaults to False.
        infer_gen_constraints (bool): Whether to automatically infer constraints for
            LLM generation based on query context. Defaults to True.
        table_to_title (Optional[Dict[str, str]]): Optional mapping from table names to
            descriptive titles, useful for datasets where table titles contain metadata.
    """

    db: t.Union[pd.DataFrame, dict, str, Database] = field(default=None)
    model: t.Optional[Model] = field(default=None)
    ingredients: t.Optional[Collection[t.Type[Ingredient]]] = field(
        default_factory=list
    )

    verbose: bool = field(default=False)
    infer_gen_constraints: bool = field(default=True)
    table_to_title: t.Optional[t.Dict[str, str]] = field(default=None)

    def __post_init__(self):
        if not isinstance(self.db, Database):
            self.db = self._infer_db_type(self.db)
        if self.db is None:
            raise ValueError("df_or_db_path must be provided")
        self.ingredients = self._merge_default_ingredients(self.ingredients)
        self._toggle_verbosity(self.verbose)

    @staticmethod
    def _toggle_verbosity(verbose_in_use: bool):
        def set_level(l: int):
            logger.setLevel(l)
            for handler in logger.handlers:
                handler.setLevel(l)

        if verbose_in_use:
            set_level(logging.DEBUG)
        else:
            set_level(logging.ERROR)

    @staticmethod
    def _merge_default_ingredients(
        ingredients: t.Optional[Collection[t.Type[Ingredient]]],
    ):
        from blendsql.ingredients import LLMQA, LLMMap, LLMJoin

        DEFAULT_INGREDIENTS = {LLMQA, LLMMap, LLMJoin}
        ingredients = set(ingredients)
        try:
            ingredient_names = {i.__name__ for i in ingredients}
        except AttributeError as e:
            raise IngredientException(
                "All arguments passed to `ingredients` should be `Ingredient` classes!"
            ) from e
        for default_ingredient in DEFAULT_INGREDIENTS:
            if default_ingredient.__name__ not in ingredient_names:
                ingredients.add(default_ingredient)
        return ingredients

    @staticmethod
    def _infer_db_type(df_or_db_path) -> Database:
        from pathlib import Path

        if df_or_db_path is None:
            from .db.pandas import Pandas

            return Pandas({})  # Load an empty DuckDB connection

        elif isinstance(df_or_db_path, (pd.DataFrame, dict)):
            from .db.pandas import Pandas

            return Pandas(df_or_db_path)
        elif isinstance(df_or_db_path, (str, Path)):
            if Path(df_or_db_path).exists():
                from .db.sqlite import SQLite

                return SQLite(df_or_db_path)
            else:
                from .db.postgresql import PostgreSQL

                return PostgreSQL(df_or_db_path)
        else:
            raise ValueError(
                f"Could not resolve '{df_or_db_path}' to a valid database type!"
            )

    def execute(
        self,
        query: str,
        ingredients: t.Optional[Collection[t.Type[Ingredient]]] = None,
        model: t.Optional[Model] = None,
        infer_gen_constraints: t.Optional[bool] = None,
        verbose: t.Optional[bool] = None,
    ) -> Smoothie:
        '''Executes a BlendSQL query against the database and returns the final result,
        along with the intermediate reasoning steps taken, using the available model
        and ingredient context.

        Args:
            query: The BlendSQL query to execute
            ingredients: Collection of ingredient classes to use when interpreting the BlendSQL query
            model: Which BlendSQL model to use in performing ingredient tasks in the current query.
                Falls back to the instance-level `model` if not provided.
            infer_gen_constraints: Optionally infer the output format of an `LLMMap` call, given the predicate context.
                For example, in `{{LLMMap('convert to date', 'w::listing date')}} <= '1960-12-31'`
                we can infer the output format should look like '1960-12-31' and both:
                    1) Put this string in the `example_outputs` kwarg
                    2) If we have a LocalModel, pass the r'\d{4}-\d{2}-\d{2}' pattern to guidance
            verbose: Boolean defining whether to run with the logger in debug mode

        Returns:
            smoothie: `Smoothie` dataclass containing pd.DataFrame output and execution metadata

        Examples:
            ```python
            import pandas as pd

            from blendsql import BlendSQL, config
            from blendsql.ingredients import LLMMap, LLMQA, LLMJoin
            from blendsql.models import LiteLLM, TransformersLLM

            # Optionally set how many async calls to allow concurrently
            # This depends on your OpenAI/Anthropic/etc. rate limits
            config.set_async_limit(10)

            # Load model
            model = LiteLLM("openai/gpt-4o-mini") # requires .env file with `OPENAI_API_KEY`
            # model = LiteLLM("anthropic/claude-3-haiku-20240307") # requires .env file with `ANTHROPIC_API_KEY`
            # model = TransformersLLM(
            #    "meta-llama/Llama-3.2-1B-Instruct",
            #    config={"chat_template": Llama3ChatTemplate, "device_map": "auto"},
            # ) # run with any local Transformers model

            # Prepare our BlendSQL connection
            bsql = BlendSQL(
                {
                    "People": pd.DataFrame(
                        {
                            "Name": [
                                "George Washington",
                                "John Quincy Adams",
                                "Thomas Jefferson",
                                "James Madison",
                                "James Monroe",
                                "Alexander Hamilton",
                                "Sabrina Carpenter",
                                "Charli XCX",
                                "Elon Musk",
                                "Michelle Obama",
                                "Elvis Presley",
                            ],
                            "Known_For": [
                                "Established federal government, First U.S. President",
                                "XYZ Affair, Alien and Sedition Acts",
                                "Louisiana Purchase, Declaration of Independence",
                                "War of 1812, Constitution",
                                "Monroe Doctrine, Missouri Compromise",
                                "Created national bank, Federalist Papers",
                                "Nonsense, Emails I Cant Send, Mean Girls musical",
                                "Crash, How Im Feeling Now, Boom Clap",
                                "Tesla, SpaceX, Twitter/X acquisition",
                                "Lets Move campaign, Becoming memoir",
                                "14 Grammys, King of Rock n Roll",
                            ],
                        }
                    ),
                    "Eras": pd.DataFrame({"Years": ["1800-1900", "1900-2000", "2000-Now"]}),
                },
                ingredients={LLMMap, LLMQA, LLMJoin},
                model=model,
            )

            smoothie = bsql.execute(
                """
                SELECT * FROM People P
                WHERE P.Name IN {{
                    LLMQA('First 3 presidents of the U.S?', quantifier='{3}')
                }}
                """
            )

            print(smoothie.df)
            # ┌───────────────────┬───────────────────────────────────────────────────────┐
            # │ Name              │ Known_For                                             │
            # ├───────────────────┼───────────────────────────────────────────────────────┤
            # │ George Washington │ Established federal government, First U.S. Preside... │
            # │ John Quincy Adams │ XYZ Affair, Alien and Sedition Acts                   │
            # │ Thomas Jefferson  │ Louisiana Purchase, Declaration of Independence       │
            # └───────────────────┴───────────────────────────────────────────────────────┘
            print(smoothie.summary())
            # ┌────────────┬──────────────────────┬─────────────────┬─────────────────────┐
            # │   Time (s) │   # Generation Calls │   Prompt Tokens │   Completion Tokens │
            # ├────────────┼──────────────────────┼─────────────────┼─────────────────────┤
            # │    1.25158 │                    1 │             296 │                  16 │
            # └────────────┴──────────────────────┴─────────────────┴─────────────────────┘
            ```
        '''
        self._toggle_verbosity(verbose if verbose is not None else self.verbose)

        start = time.time()
        model_in_use = model or self.model
        try:
            smoothie = _blend(
                query=query,
                db=self.db,
                default_model=model_in_use,
                ingredients=self._merge_default_ingredients(
                    ingredients or self.ingredients
                ),
                infer_gen_constraints=infer_gen_constraints
                if infer_gen_constraints is not None
                else self.infer_gen_constraints,
                table_to_title=self.table_to_title,
            )
        except Exception as error:
            raise error
        finally:
            # In the case of a recursive `_blend()` call,
            #   this logic allows temp tables to persist until
            #   the final base case is fulfilled.
            self.db._reset_connection()
        smoothie.meta.process_time_seconds = time.time() - start
        # Reset model stats, so future executions don't add here
        if model_in_use is not None:
            model_in_use.reset_stats()
        return smoothie

execute(query, ingredients=None, model=None, infer_gen_constraints=None, verbose=None)

The execute() function runs a BlendSQL query against the database and returns the final result, along with the intermediate reasoning steps taken, using the available model and ingredient context.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `query` | `str` | The BlendSQL query to execute | *required* |
| `ingredients` | `Optional[Collection[Type[Ingredient]]]` | Collection of ingredient classes to use when interpreting the BlendSQL query | `None` |
| `model` | `Optional[Model]` | Which BlendSQL model to use in performing ingredient tasks in the current query; falls back to the instance-level `model` if not provided | `None` |
| `infer_gen_constraints` | `Optional[bool]` | Optionally infer the output format of an `LLMMap` call, given the predicate context (see the sketch after this table). For example, in `{{LLMMap('convert to date', 'w::listing date')}} <= '1960-12-31'` we can infer the output format should look like '1960-12-31' and both: 1) put this string in the `example_outputs` kwarg, and 2) if we have a LocalModel, pass the `r'\d{4}-\d{2}-\d{2}'` pattern to guidance | `None` |
| `verbose` | `Optional[bool]` | Boolean defining whether to run with the logger in debug mode | `None` |
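A hedged sketch of how `infer_gen_constraints` comes into play, reusing the predicate from the docstring example; it assumes a `bsql` instance whose database contains a table `w` with a 'listing date' column.

```python
# Because the LLMMap output is compared against '1960-12-31', BlendSQL can infer
# that generations should look like dates (and, for local models, constrain them
# with a r'\d{4}-\d{2}-\d{2}' pattern).
smoothie = bsql.execute(
    """
    SELECT * FROM w
    WHERE {{LLMMap('convert to date', 'w::listing date')}} <= '1960-12-31'
    """,
    infer_gen_constraints=True,  # the default; pass False to use the model's unconstrained output
)
```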

Returns:

| Name | Type | Description |
|------|------|-------------|
| `smoothie` | `Smoothie` | `Smoothie` dataclass containing `pd.DataFrame` output and execution metadata |
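Beyond the example below, a small sketch of inspecting the returned `Smoothie` directly; `.df` and `.meta.process_time_seconds` appear in the source on this page, while other `meta` fields may differ between versions.

```python
smoothie = bsql.execute("SELECT * FROM People LIMIT 3")

result_df = smoothie.df                       # final pd.DataFrame output
elapsed = smoothie.meta.process_time_seconds  # wall-clock time set by execute()
print(f"Finished in {elapsed:.2f}s with {len(result_df)} rows")
```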

Examples:

import pandas as pd

from blendsql import BlendSQL, config
from blendsql.ingredients import LLMMap, LLMQA, LLMJoin
from blendsql.models import LiteLLM, TransformersLLM

# Optionally set how many async calls to allow concurrently
# This depends on your OpenAI/Anthropic/etc. rate limits
config.set_async_limit(10)

# Load model
model = LiteLLM("openai/gpt-4o-mini") # requires .env file with `OPENAI_API_KEY`
# model = LiteLLM("anthropic/claude-3-haiku-20240307") # requires .env file with `ANTHROPIC_API_KEY`
# model = TransformersLLM(
#    "meta-llama/Llama-3.2-1B-Instruct",
#    config={"chat_template": Llama3ChatTemplate, "device_map": "auto"},
# ) # run with any local Transformers model

# Prepare our BlendSQL connection
bsql = BlendSQL(
    {
        "People": pd.DataFrame(
            {
                "Name": [
                    "George Washington",
                    "John Quincy Adams",
                    "Thomas Jefferson",
                    "James Madison",
                    "James Monroe",
                    "Alexander Hamilton",
                    "Sabrina Carpenter",
                    "Charli XCX",
                    "Elon Musk",
                    "Michelle Obama",
                    "Elvis Presley",
                ],
                "Known_For": [
                    "Established federal government, First U.S. President",
                    "XYZ Affair, Alien and Sedition Acts",
                    "Louisiana Purchase, Declaration of Independence",
                    "War of 1812, Constitution",
                    "Monroe Doctrine, Missouri Compromise",
                    "Created national bank, Federalist Papers",
                    "Nonsense, Emails I Cant Send, Mean Girls musical",
                    "Crash, How Im Feeling Now, Boom Clap",
                    "Tesla, SpaceX, Twitter/X acquisition",
                    "Lets Move campaign, Becoming memoir",
                    "14 Grammys, King of Rock n Roll",
                ],
            }
        ),
        "Eras": pd.DataFrame({"Years": ["1800-1900", "1900-2000", "2000-Now"]}),
    },
    ingredients={LLMMap, LLMQA, LLMJoin},
    model=model,
)

smoothie = bsql.execute(
    """
    SELECT * FROM People P
    WHERE P.Name IN {{
        LLMQA('First 3 presidents of the U.S?', quantifier='{3}')
    }}
    """
)

print(smoothie.df)
# ┌───────────────────┬───────────────────────────────────────────────────────┐
# │ Name              │ Known_For                                             │
# ├───────────────────┼───────────────────────────────────────────────────────┤
# │ George Washington │ Established federal government, First U.S. Preside... │
# │ John Quincy Adams │ XYZ Affair, Alien and Sedition Acts                   │
# │ Thomas Jefferson  │ Louisiana Purchase, Declaration of Independence       │
# └───────────────────┴───────────────────────────────────────────────────────┘
print(smoothie.summary())
# ┌────────────┬──────────────────────┬─────────────────┬─────────────────────┐
# │   Time (s) │   # Generation Calls │   Prompt Tokens │   Completion Tokens │
# ├────────────┼──────────────────────┼─────────────────┼─────────────────────┤
# │    1.25158 │                    1 │             296 │                  16 │
# └────────────┴──────────────────────┴─────────────────┴─────────────────────┘
Source code in blendsql/blendsql.py
def execute(
    self,
    query: str,
    ingredients: t.Optional[Collection[t.Type[Ingredient]]] = None,
    model: t.Optional[Model] = None,
    infer_gen_constraints: t.Optional[bool] = None,
    verbose: t.Optional[bool] = None,
) -> Smoothie:
    '''Executes a BlendSQL query against the database and returns the final result,
    along with the intermediate reasoning steps taken, using the available model
    and ingredient context.

    Args:
        query: The BlendSQL query to execute
        ingredients: Collection of ingredient classes to use when interpreting the BlendSQL query
        model: Which BlendSQL model to use in performing ingredient tasks in the current query.
            Falls back to the instance-level `model` if not provided.
        infer_gen_constraints: Optionally infer the output format of an `LLMMap` call, given the predicate context.
            For example, in `{{LLMMap('convert to date', 'w::listing date')}} <= '1960-12-31'`
            we can infer the output format should look like '1960-12-31' and both:
                1) Put this string in the `example_outputs` kwarg
                2) If we have a LocalModel, pass the r'\d{4}-\d{2}-\d{2}' pattern to guidance
        verbose: Boolean defining whether to run with the logger in debug mode

    Returns:
        smoothie: `Smoothie` dataclass containing pd.DataFrame output and execution metadata

    Examples:
        ```python
        import pandas as pd

        from blendsql import BlendSQL, config
        from blendsql.ingredients import LLMMap, LLMQA, LLMJoin
        from blendsql.models import LiteLLM, TransformersLLM

        # Optionally set how many async calls to allow concurrently
        # This depends on your OpenAI/Anthropic/etc. rate limits
        config.set_async_limit(10)

        # Load model
        model = LiteLLM("openai/gpt-4o-mini") # requires .env file with `OPENAI_API_KEY`
        # model = LiteLLM("anthropic/claude-3-haiku-20240307") # requires .env file with `ANTHROPIC_API_KEY`
        # model = TransformersLLM(
        #    "meta-llama/Llama-3.2-1B-Instruct",
        #    config={"chat_template": Llama3ChatTemplate, "device_map": "auto"},
        # ) # run with any local Transformers model

        # Prepare our BlendSQL connection
        bsql = BlendSQL(
            {
                "People": pd.DataFrame(
                    {
                        "Name": [
                            "George Washington",
                            "John Quincy Adams",
                            "Thomas Jefferson",
                            "James Madison",
                            "James Monroe",
                            "Alexander Hamilton",
                            "Sabrina Carpenter",
                            "Charli XCX",
                            "Elon Musk",
                            "Michelle Obama",
                            "Elvis Presley",
                        ],
                        "Known_For": [
                            "Established federal government, First U.S. President",
                            "XYZ Affair, Alien and Sedition Acts",
                            "Louisiana Purchase, Declaration of Independence",
                            "War of 1812, Constitution",
                            "Monroe Doctrine, Missouri Compromise",
                            "Created national bank, Federalist Papers",
                            "Nonsense, Emails I Cant Send, Mean Girls musical",
                            "Crash, How Im Feeling Now, Boom Clap",
                            "Tesla, SpaceX, Twitter/X acquisition",
                            "Lets Move campaign, Becoming memoir",
                            "14 Grammys, King of Rock n Roll",
                        ],
                    }
                ),
                "Eras": pd.DataFrame({"Years": ["1800-1900", "1900-2000", "2000-Now"]}),
            },
            ingredients={LLMMap, LLMQA, LLMJoin},
            model=model,
        )

        smoothie = bsql.execute(
            """
            SELECT * FROM People P
            WHERE P.Name IN {{
                LLMQA('First 3 presidents of the U.S?', quantifier='{3}')
            }}
            """
        )

        print(smoothie.df)
        # ┌───────────────────┬───────────────────────────────────────────────────────┐
        # │ Name              │ Known_For                                             │
        # ├───────────────────┼───────────────────────────────────────────────────────┤
        # │ George Washington │ Established federal government, First U.S. Preside... │
        # │ John Quincy Adams │ XYZ Affair, Alien and Sedition Acts                   │
        # │ Thomas Jefferson  │ Louisiana Purchase, Declaration of Independence       │
        # └───────────────────┴───────────────────────────────────────────────────────┘
        print(smoothie.summary())
        # ┌────────────┬──────────────────────┬─────────────────┬─────────────────────┐
        # │   Time (s) │   # Generation Calls │   Prompt Tokens │   Completion Tokens │
        # ├────────────┼──────────────────────┼─────────────────┼─────────────────────┤
        # │    1.25158 │                    1 │             296 │                  16 │
        # └────────────┴──────────────────────┴─────────────────┴─────────────────────┘
        ```
    '''
    self._toggle_verbosity(verbose if verbose is not None else self.verbose)

    start = time.time()
    model_in_use = model or self.model
    try:
        smoothie = _blend(
            query=query,
            db=self.db,
            default_model=model_in_use,
            ingredients=self._merge_default_ingredients(
                ingredients or self.ingredients
            ),
            infer_gen_constraints=infer_gen_constraints
            if infer_gen_constraints is not None
            else self.infer_gen_constraints,
            table_to_title=self.table_to_title,
        )
    except Exception as error:
        raise error
    finally:
        # In the case of a recursive `_blend()` call,
        #   this logic allows temp tables to persist until
        #   the final base case is fulfilled.
        self.db._reset_connection()
    smoothie.meta.process_time_seconds = time.time() - start
    # Reset model stats, so future executions don't add here
    if model_in_use is not None:
        model_in_use.reset_stats()
    return smoothie