Python library guide
This guide covers configuration, backend setup, error handling, and result objects for the featuremesh Python package. For installation and basic usage, see the Python library page.
Configuration
Use set_default() to configure the registry endpoint, display preferences, and other settings:
from featuremesh import set_default

# API endpoints
set_default("registry.host", "https://api.featuremesh.com")
set_default("registry.path", "/v1/featureql")
set_default("registry.timeout", 30)  # request timeout — presumably seconds; confirm in API docs

# Display preferences
set_default("debug_mode", False)  # verbose translation logging off by default
set_default("show_sql", True)     # echo the generated SQL alongside results

# Get current settings
from featuremesh import get_default, get_all_defaults

debug_mode = get_default("debug_mode")
all_settings = get_all_defaults()

Backend setup
Each backend needs a sql_executor function that takes a SQL string and returns a Pandas DataFrame. Here are ready-to-use examples for each supported backend.
DuckDB
from featuremesh import OfflineClient, Backend
import duckdb
# Option 1: Using a persistent connection
# Module-level handle so every query shares a single DuckDB connection.
_duckdb_conn = None

def get_duckdb_conn(storage_path: str = ":memory:"):
    """Return the shared DuckDB connection, opening it on first use.

    NOTE(review): only the first call's storage_path takes effect; later
    calls reuse the already-open connection regardless of the path passed.
    """
    global _duckdb_conn
    if _duckdb_conn is not None:
        return _duckdb_conn
    _duckdb_conn = duckdb.connect(storage_path)
    return _duckdb_conn
def query_duckdb(sql: str, storage_path: str = ":memory:"):
    """Run *sql* on the shared DuckDB connection; return a pandas DataFrame."""
    relation = get_duckdb_conn(storage_path).sql(sql)
    return relation.df()
# Offline client wired to DuckDB through the executor defined above.
client = OfflineClient(
    access_token=__YOUR_ACCESS_TOKEN__,  # replace with your FeatureMesh token
    backend=Backend.DUCKDB,
    sql_executor=query_duckdb
)
# Option 2: Simple in-memory executor
def simple_duckdb_executor(sql: str):
    """Run *sql* on DuckDB's default in-memory database; return a DataFrame."""
    result = duckdb.sql(sql)
    return result.df()
client = OfflineClient(
access_token=__YOUR_ACCESS_TOKEN__,
backend=Backend.DUCKDB,
sql_executor=simple_duckdb_executor
)

Trino
from featuremesh import OfflineClient, Backend
import pandas as pd
import trino.dbapi
def query_trino(sql: str):
    """Execute *sql* on Trino and return the results as a pandas DataFrame.

    Improvements over the naive version:
      * the connection is always closed (try/finally), so repeated calls
        do not leak sockets;
      * an empty result set keeps its column names instead of collapsing
        to a column-less empty DataFrame.
    """
    # Configure your Trino connection details
    conn = trino.dbapi.connect(
        host="localhost",  # or host.docker.internal for docker
        port=8080,
        user="admin",
        catalog="memory",
        schema="default",
    )
    try:
        cur = conn.cursor()
        cur.execute(sql)
        description = cur.description
        rows = cur.fetchall()
    finally:
        conn.close()
    if description is None:
        # Statement produced no result set (e.g. DDL).
        return pd.DataFrame()
    # DB-API: description is a sequence of 7-tuples; [0] is the column name.
    return pd.DataFrame(rows, columns=[col[0] for col in description])
# Offline client that executes translated SQL through Trino.
client = OfflineClient(
    access_token=__YOUR_ACCESS_TOKEN__,  # replace with your FeatureMesh token
    backend=Backend.TRINO,
    sql_executor=query_trino
)
# For production with OAuth2 authentication:
import trino.auth
def query_trino_oauth(sql: str):
    """Execute *sql* on Trino using OAuth2 authentication.

    Same contract as query_trino: the connection is always closed
    (try/finally), and an empty result set keeps its column names.
    """
    conn = trino.dbapi.connect(
        host="trino.your-domain.com",
        port=443,
        user="your-username",
        catalog="your-catalog",
        schema="default",
        http_scheme="https",
        auth=trino.auth.OAuth2Authentication(),
    )
    try:
        cur = conn.cursor()
        cur.execute(sql)
        description = cur.description
        rows = cur.fetchall()
    finally:
        conn.close()
    if description is None:
        return pd.DataFrame()
    return pd.DataFrame(rows, columns=[col[0] for col in description])

BigQuery
from featuremesh import OfflineClient, Backend
from google.cloud import bigquery
def query_bigquery(sql: str):
    """Run *sql* on BigQuery and return the result as a pandas DataFrame."""
    # Local name avoids shadowing the module-level FeatureMesh `client`.
    bq_client = bigquery.Client(project=__YOUR_PROJECT_ID__)
    job = bq_client.query(sql)
    return job.to_dataframe()
client = OfflineClient(
access_token=__YOUR_ACCESS_TOKEN__,
backend=Backend.BIGQUERY,
sql_executor=query_bigquery
)

Error handling
All operations return result objects with structured error information. Check result.success before accessing the DataFrame:
result = client.query("""
WITH
FEATURE1 := INPUT(BIGINT)
SELECT
FEATURE1 := BIND_VALUES(ARRAY[1, 2, 3]),
FEATURE2 := FEATURE1 * 2
""")
if result.success:
print("Query succeeded!")
print(result.dataframe)
else:
print("Query failed!")
for error in result.errors:
print(f"Error [{error.code}]: {error.message}")
if error.context:
print(f"Context: {error.context}")

For richer error display in notebooks:
from featuremesh import display_errors, display_warnings
display_errors(result.errors)
display_warnings(result.warnings)

Translation only
You can also translate FeatureQL to SQL without executing it — useful for debugging or integrating with other tools:
# Only available with OfflineClient
featureql_query = """
WITH
FEATURE1 := INPUT(BIGINT)
SELECT
FEATURE1 := BIND_VALUES(ARRAY[1, 2, 3]),
FEATURE2 := FEATURE1 * 2
"""
translate_result = client_offline.translate(featureql_query)
print(translate_result.sql) # Generated SQL
print(translate_result.success) # True if translation succeeded

Debug mode
Pass debug_mode=True to see the intermediate translation steps — useful for understanding how FeatureQL resolves dependencies and generates SQL:
result = client.query("""
WITH
FEATURE1 := INPUT(BIGINT)
SELECT
FEATURE1 := BIND_VALUES(ARRAY[1, 2, 3]),
FEATURE2 := FEATURE1 * 2
""", debug_mode=True)
if result.debug_logs:
print(result.debug_logs)

Result objects
QueryResult
Returned by client.query(). The most important fields are success, dataframe, and sql:
@dataclass
class QueryResult:
    """Result of client.query(); check `success` before using `dataframe`."""

    featureql: str                      # Original FeatureQL query
    sql: Optional[str]                  # Translated SQL
    dataframe: Optional[pd.DataFrame]   # Query results
    slt: Optional[str]                  # SLT format (offline only)
    warnings: list[Warning]             # Non-blocking warnings
    errors: list[Error]                 # Errors that occurred
    backend: Optional[str]              # Backend used
    debug_mode: bool                    # Debug mode enabled
    debug_logs: Optional[dict]          # Debug information
    client_type: str                    # "OfflineClient" or "OnlineClient"
    success: bool                       # Property: True if no errors

TranslateResult
Returned by client.translate() (offline client only):
@dataclass
class TranslateResult:
    """Result of client.translate() (offline client only)."""

    featureql: str                  # Original FeatureQL query
    sql: Optional[str]              # Translated SQL
    warnings: list[Warning]         # Non-blocking warnings
    errors: list[Error]             # Errors that occurred
    full_response: Optional[dict]   # Full API response
    backend: Optional[str]          # Backend used
    debug_mode: bool                # Debug mode enabled
    debug_logs: Optional[dict]      # Debug information
    client_type: str                # "OfflineClient"
    success: bool                   # Property: True if no errors

Version
import featuremesh
print(featuremesh.__version__)

See also
- Python library — Installation and quick start
- Getting started — Overview of all entry points
- FeatureQL for the Impatient — Quick tour of the language for SQL users
- Demos Docker container — Full environment with notebooks and sample data