import ibis
ibis.options.interactive = True  # execute and display expression results eagerly
Why Ibis?
Ibis defines a Python dataframe API that executes on any query engine – the frontend for any backend data platform, with nearly 20 backends today. This allows Ibis to have excellent performance – as good as the backend it is connected to – with a consistent user experience.
What is Ibis?
Ibis is the portable Python dataframe library.
We can demonstrate this with a simple example on a few local query engines:
= ibis.connect("duckdb://")
con
= con.read_parquet("penguins.parquet")
t 3) t.limit(
- 1
- Change only your connection to switch between backends.
┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━┓
┃ species ┃ island    ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ body_mass_g ┃ sex    ┃ year  ┃
┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━┩
│ string  │ string    │ float64        │ float64       │ int64             │ int64       │ string │ int64 │
├─────────┼───────────┼────────────────┼───────────────┼───────────────────┼─────────────┼────────┼───────┤
│ Adelie  │ Torgersen │ 39.1           │ 18.7          │ 181               │ 3750        │ male   │ 2007  │
│ Adelie  │ Torgersen │ 39.5           │ 17.4          │ 186               │ 3800        │ female │ 2007  │
│ Adelie  │ Torgersen │ 40.3           │ 18.0          │ 195               │ 3250        │ female │ 2007  │
└─────────┴───────────┴────────────────┴───────────────┴───────────────────┴─────────────┴────────┴───────┘
"species", "island"]).agg(count=t.count()).order_by("count") t.group_by([
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
= ibis.connect("polars://")
con
= con.read_parquet("penguins.parquet")
t 3) t.limit(
- 1
- Change only your connection to switch between backends.
┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━┓
┃ species ┃ island    ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ body_mass_g ┃ sex    ┃ year  ┃
┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━┩
│ string  │ string    │ float64        │ float64       │ int64             │ int64       │ string │ int64 │
├─────────┼───────────┼────────────────┼───────────────┼───────────────────┼─────────────┼────────┼───────┤
│ Adelie  │ Torgersen │ 39.1           │ 18.7          │ 181               │ 3750        │ male   │ 2007  │
│ Adelie  │ Torgersen │ 39.5           │ 17.4          │ 186               │ 3800        │ female │ 2007  │
│ Adelie  │ Torgersen │ 40.3           │ 18.0          │ 195               │ 3250        │ female │ 2007  │
└─────────┴───────────┴────────────────┴───────────────┴───────────────────┴─────────────┴────────┴───────┘
"species", "island"]).agg(count=t.count()).order_by("count") t.group_by([
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
= ibis.connect("datafusion://")
con
= con.read_parquet("penguins.parquet")
t 3) t.limit(
- 1
- Change only your connection to switch between backends.
┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━┓
┃ species ┃ island    ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ body_mass_g ┃ sex    ┃ year  ┃
┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━┩
│ string  │ string    │ float64        │ float64       │ int64             │ int64       │ string │ int64 │
├─────────┼───────────┼────────────────┼───────────────┼───────────────────┼─────────────┼────────┼───────┤
│ Adelie  │ Torgersen │ 39.1           │ 18.7          │ 181               │ 3750        │ male   │ 2007  │
│ Adelie  │ Torgersen │ 39.5           │ 17.4          │ 186               │ 3800        │ female │ 2007  │
│ Adelie  │ Torgersen │ 40.3           │ 18.0          │ 195               │ 3250        │ female │ 2007  │
└─────────┴───────────┴────────────────┴───────────────┴───────────────────┴─────────────┴────────┴───────┘
"species", "island"]).agg(count=t.count()).order_by("count") t.group_by([
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
= ibis.connect("pyspark://")
con
= con.read_parquet("penguins.parquet")
t 3) t.limit(
- 1
- Change only your connection to switch between backends.
┏━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━┳━━━━━━━┓
┃ species ┃ island    ┃ bill_length_mm ┃ bill_depth_mm ┃ flipper_length_mm ┃ body_mass_g ┃ sex    ┃ year  ┃
┡━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━╇━━━━━━━┩
│ string  │ string    │ float64        │ float64       │ int64             │ int64       │ string │ int64 │
├─────────┼───────────┼────────────────┼───────────────┼───────────────────┼─────────────┼────────┼───────┤
│ Adelie  │ Torgersen │ 39.1           │ 18.7          │ 181               │ 3750        │ male   │ 2007  │
│ Adelie  │ Torgersen │ 39.5           │ 17.4          │ 186               │ 3800        │ female │ 2007  │
│ Adelie  │ Torgersen │ 40.3           │ 18.0          │ 195               │ 3250        │ female │ 2007  │
└─────────┴───────────┴────────────────┴───────────────┴───────────────────┴─────────────┴────────┴───────┘
"species", "island"]).agg(count=t.count()).order_by("count") t.group_by([
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
Who is Ibis for?
Ibis is for data engineers, data analysts, and data scientists (or any title that needs to work with data!) to use directly with their data platform(s) of choice. It also has benefits for data platforms, organizations, and library developers.
Ibis for practitioners
You can use Ibis at any stage of your data workflow, no matter your role.
Data engineers can use Ibis to:
- write and maintain complex ETL/ELT jobs
- replace fragile SQL string pipelines with a robust Python API (see the sketch after this list)
- replace PySpark with a more Pythonic API that supports Spark and many other backends
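As a rough sketch of replacing a SQL string pipeline, a hand-built query can often be expressed as a validated Ibis expression instead; the connection, table, and column names below are hypothetical.

import ibis

# Hypothetical pipeline step: instead of formatting a SQL string like
#   f"SELECT region, SUM(amount) AS total FROM orders WHERE status = '{status}' GROUP BY region"
# build the same query as an Ibis expression.
con = ibis.duckdb.connect()                  # any supported backend works here
orders = con.table("orders")                 # assumes an existing "orders" table

status = "shipped"
shipped = orders.filter(orders.status == status)   # parameterized with a Python variable, no string formatting
totals = shipped.group_by("region").agg(total=shipped.amount.sum())

The expression compiles to the connected backend's SQL dialect and runs where the data lives, so the same job can move between engines without rewriting query strings.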
Data analysts can use Ibis to:
- perform rapid exploratory data analysis using interactive mode (a short sketch follows this list)
- create end-to-end analytics workflows
- work in a general-purpose, yet easy to learn, programming language without the need for formatting SQL strings
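A minimal sketch of that workflow, assuming a local penguins.parquet file and the default DuckDB backend:

import ibis

ibis.options.interactive = True              # print results eagerly while exploring
t = ibis.read_parquet("penguins.parquet")    # uses the default local backend (DuckDB)

adelie = t.filter(t.species == "Adelie")
adelie.group_by("island").agg(
    n=adelie.count(),
    avg_body_mass_g=adelie.body_mass_g.mean(),
)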
Data scientists can use Ibis to:
- extract a sample of data for local iteration with a fast local backend (illustrated after this list)
- prototype with the same API that will be used in production
- preprocess and feature engineer data before training a machine learning model
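For example, sampling a table and materializing it locally for fast iteration might look like the sketch below; the sampling fraction and feature columns are illustrative assumptions.

import ibis

con = ibis.connect("duckdb://")
t = con.read_parquet("penguins.parquet")

# Pull a (hypothetical) 25% sample into pandas for local prototyping.
sample = t.sample(0.25).to_pandas()
features = sample[["bill_length_mm", "bill_depth_mm", "flipper_length_mm", "body_mass_g"]].dropna()

# The same Ibis expressions can later run unchanged against the full production table.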
Ibis for data platforms
Data platforms can use Ibis to bring a fully featured Python dataframe library to their platform with minimal effort. In addition to a great Python dataframe experience for their users, they also get integrations into the broader Python and ML ecosystem.
Often, data platforms evolve to support Python in some sequence like:
- Develop a fast query engine with a SQL frontend
- Gain popularity and need to support Python for data science and ML use cases
- Develop a bespoke pandas or PySpark-like dataframe library and ML integrations
This third step is where Ibis comes in. Instead of spending a lot of time and money developing a bespoke Python dataframe library, you can create an Ibis backend for your data platform in as little as four hours for an experienced Ibis developer or, more typically, on the order of one or two months for a new contributor.
The pandas API inherently does not scale due to its single-threaded design, ordered index, and a lot of API baggage. The creator of pandas (and Ibis!) has talked about the issues with pandas publicly. While there have been projects attempting to scale the pandas API, they always result in a dubious support matrix. You can see that with Modin or pandas on Spark (formerly known as Koalas).
Google BigQuery DataFrames is a more modern attempt to scale the pandas API, built on top of Ibis. If you are going to build a pandas-style API, we recommend taking a look at that project.
PySpark is a great API for Spark, but it is not very Pythonic and it is tightly coupled to the Spark execution engine.
Ibis takes inspiration from pandas and PySpark – and from R and SQL – but is designed to be scalable from the start. It offers a neutral, self-governed, open source option for your data platform.
Ibis for organizations
Organizations can use Ibis to standardize the interface for SQL and Python data practitioners. It also allows organizations to:
- transfer data between systems (sketched after this list)
- transform, analyze, and prepare data where it lives
- benchmark your workload(s) across data systems using the same code
- mix SQL and Python code seamlessly, with all the benefits of a general-purpose programming language, type checking, and expression validation
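As a sketch of the first two points, a result can be computed where the data lives and then transferred to another system through PyArrow; the connections and table names below are placeholders.

import ibis

src = ibis.duckdb.connect("warehouse.ddb")                                               # placeholder source
dst = ibis.postgres.connect(host="db.internal", user="analytics", database="reporting")  # placeholder destination

events = src.table("events")   # assumes an existing "events" table
summary = events.group_by("event_type").agg(n=events.count())

# Materialize only the result and load it into the destination system.
dst.create_table("event_summary", summary.to_pyarrow(), overwrite=True)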
Ibis for library developers
Python developers creating libraries can use Ibis to:
- instantly support nearly 20 data backends
- instantly support pandas, PyArrow, and Polars objects (see the example after this list)
- read and write from all common file formats (depending on the backend)
- trace column-level lineage through Ibis expressions
- compile Ibis expressions to SQL or Substrait
- perform cross-dialect SQL transpilation (powered by SQLGlot)
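A minimal sketch of working with in-memory objects and compiling to SQL; the DataFrame here is made up.

import ibis
import pandas as pd

df = pd.DataFrame({"species": ["Adelie", "Gentoo"], "body_mass_g": [3750, 5000]})
t = ibis.memtable(df, name="penguins")   # also accepts PyArrow tables and Polars frames

expr = t.group_by("species").agg(avg_mass=t.body_mass_g.mean())
print(ibis.to_sql(expr))                 # emit SQL for the default dialect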
How does Ibis work?
Most Python dataframes are tightly coupled to their execution engine. And many databases only support SQL, with no Python API. Ibis solves this problem by providing a common API for data manipulation in Python, and compiling that API into the backend’s native language. This means you can learn a single API and use it across any supported backend (execution engine).
Ibis broadly supports two types of backend:
- SQL-generating backends
- DataFrame-generating backends
Most backends generate SQL. Ibis uses SQLGlot to transform Ibis expressions into SQL strings. You can also use the .sql() methods to mix in SQL strings, compiling them into Ibis expressions.
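As a rough illustration of SQL generation (the schema below is made up), the same expression can be rendered for different SQL dialects:

import ibis

# An unbound table: a name and a schema, not tied to any backend.
t = ibis.table({"species": "string", "island": "string"}, name="penguins")
expr = t.group_by("species").agg(n=t.count())

print(ibis.to_sql(expr, dialect="duckdb"))    # SQLGlot handles the dialect differences
print(ibis.to_sql(expr, dialect="postgres"))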
While portability with Ibis isn’t perfect, commonalities across backends and SQL dialects combined with years of engineering effort produce a full-featured and robust framework for data manipulation in Python.
In the long term, we aim to adopt a standard query plan intermediate representation (IR), such as Substrait, to simplify this further.
Python + SQL: better together
For most backends, Ibis works by compiling Python expressions into SQL:
= t.group_by(["species", "island"]).agg(count=t.count()).order_by("count")
g ibis.to_sql(g)
SELECT
  *
FROM (
  SELECT
    `t0`.`species`,
    `t0`.`island`,
    COUNT(*) AS `count`
  FROM `ibis_read_parquet_dno2q3xgujdl7g3it36jtpp4oq` AS `t0`
  GROUP BY
    1,
    2
) AS `t1`
ORDER BY
  `t1`.`count` ASC NULLS LAST
You can mix and match Python and SQL code:
= """
sql SELECT
species,
island,
COUNT(*) AS count
FROM penguins
GROUP BY species, island
""".strip()
= ibis.connect("duckdb://")
con = con.read_parquet("penguins.parquet")
t = t.alias("penguins").sql(sql)
g g
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Dream     │    56 │
│ Gentoo    │ Biscoe    │   124 │
│ Chinstrap │ Dream     │    68 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Biscoe    │    44 │
└───────────┴───────────┴───────┘
"count") g.order_by(
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
= ibis.connect("datafusion://")
con = con.read_parquet("penguins.parquet")
t = t.alias("penguins").sql(sql)
g g
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━┓
┃ species   ┃ island    ┃ count  ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━┩
│ string    │ string    │ !int64 │
├───────────┼───────────┼────────┤
│ Adelie    │ Biscoe    │     44 │
│ Adelie    │ Dream     │     56 │
│ Chinstrap │ Dream     │     68 │
│ Adelie    │ Torgersen │     52 │
│ Gentoo    │ Biscoe    │    124 │
└───────────┴───────────┴────────┘
"count") g.order_by(
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━━┓
┃ species   ┃ island    ┃ count  ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━━┩
│ string    │ string    │ !int64 │
├───────────┼───────────┼────────┤
│ Adelie    │ Biscoe    │     44 │
│ Adelie    │ Torgersen │     52 │
│ Adelie    │ Dream     │     56 │
│ Chinstrap │ Dream     │     68 │
│ Gentoo    │ Biscoe    │    124 │
└───────────┴───────────┴────────┘
= ibis.connect("pyspark://")
con = con.read_parquet("penguins.parquet")
t = t.alias("penguins").sql(sql)
g g
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Chinstrap │ Dream     │    68 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
"count") g.order_by(
┏━━━━━━━━━━━┳━━━━━━━━━━━┳━━━━━━━┓
┃ species   ┃ island    ┃ count ┃
┡━━━━━━━━━━━╇━━━━━━━━━━━╇━━━━━━━┩
│ string    │ string    │ int64 │
├───────────┼───────────┼───────┤
│ Adelie    │ Biscoe    │    44 │
│ Adelie    │ Torgersen │    52 │
│ Adelie    │ Dream     │    56 │
│ Chinstrap │ Dream     │    68 │
│ Gentoo    │ Biscoe    │   124 │
└───────────┴───────────┴───────┘
This allows you to combine the flexibility of Python with the scale and performance of modern SQL.
Scaling up and out
Out of the box, Ibis offers a great local experience for working with many file formats. You can scale up with DuckDB (the default backend) or choose from other great options like Polars and DataFusion to work locally with large datasets. Once you hit scaling issues on a local machine, you can continue scaling up with a larger machine in the cloud using the same backend and same code.
If you hit scaling issues on a large single-node machine, you can switch to a distributed backend like PySpark, BigQuery, or Trino by simply changing your connection string.
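Concretely, only the connection line changes; the connection strings below are placeholders for your own deployments.

import ibis

con = ibis.connect("duckdb://local.ddb")                       # local development (placeholder path)
# con = ibis.connect("bigquery://my-project/my-dataset")       # hypothetical distributed alternatives
# con = ibis.connect("trino://user@host:8080/catalog/schema")

t = con.table("penguins")   # assumes a "penguins" table exists in each system
t.group_by("species").agg(n=t.count())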
Stream-batch unification
As of Ibis 8.0, the first stream processing backends have been added. Since these systems tend to support SQL, we can, with minimal changes to Ibis, support both batch and streaming workloads with a single API. We aim to further unify the batch and streaming paradigms going forward.
Ecosystem
Ibis is part of a larger ecosystem of Python data tools. It is designed to work well with other tools in this ecosystem, and we continue to make it easier to use Ibis with other tools over time.
Ibis already works well with other Python dataframe libraries, visualization libraries, dashboarding libraries, and machine learning libraries.
Supported backends
You can install Ibis and a supported backend with pip, conda, mamba, or pixi.
Note that the ibis-framework package is not the same as the ibis package on PyPI. These two libraries cannot coexist in the same Python environment, as they are both imported with the ibis module name.

Install with the bigquery extra:
pip install 'ibis-framework[bigquery]'
Connect using ibis.bigquery.connect.

Install with the clickhouse extra:
pip install 'ibis-framework[clickhouse]'
Connect using ibis.clickhouse.connect.

Install with the datafusion extra:
pip install 'ibis-framework[datafusion]'
Connect using ibis.datafusion.connect.

Install with the druid extra:
pip install 'ibis-framework[druid]'
Connect using ibis.druid.connect.

Install with the duckdb extra:
pip install 'ibis-framework[duckdb]'
Connect using ibis.duckdb.connect.

Install with the exasol extra:
pip install 'ibis-framework[exasol]'
Connect using ibis.exasol.connect.

Install alongside the apache-flink package:
pip install ibis-framework apache-flink
Connect using ibis.flink.connect.

Install with the impala extra:
pip install 'ibis-framework[impala]'
Connect using ibis.impala.connect.

Install with the mssql extra:
pip install 'ibis-framework[mssql]'
Connect using ibis.mssql.connect.

Install with the mysql extra:
pip install 'ibis-framework[mysql]'
Connect using ibis.mysql.connect.

Install with the oracle extra:
pip install 'ibis-framework[oracle]'
Connect using ibis.oracle.connect.

Install with the polars extra:
pip install 'ibis-framework[polars]'
Connect using ibis.polars.connect.

Install with the postgres extra:
pip install 'ibis-framework[postgres]'
Connect using ibis.postgres.connect.

Install with the pyspark extra:
pip install 'ibis-framework[pyspark]'
Connect using ibis.pyspark.connect.

Install with the risingwave extra:
pip install 'ibis-framework[risingwave]'
Connect using ibis.risingwave.connect.

Install with the snowflake extra:
pip install 'ibis-framework[snowflake]'
Connect using ibis.snowflake.connect.

Install with the sqlite extra:
pip install 'ibis-framework[sqlite]'
Connect using ibis.sqlite.connect.

Install with the trino extra:
pip install 'ibis-framework[trino]'
Connect using ibis.trino.connect.
Install the ibis-bigquery package:
conda install -c conda-forge ibis-bigquery
Connect using ibis.bigquery.connect.

Install the ibis-clickhouse package:
conda install -c conda-forge ibis-clickhouse
Connect using ibis.clickhouse.connect.

Install the ibis-datafusion package:
conda install -c conda-forge ibis-datafusion
Connect using ibis.datafusion.connect.

Install the ibis-druid package:
conda install -c conda-forge ibis-druid
Connect using ibis.druid.connect.

Install the ibis-duckdb package:
conda install -c conda-forge ibis-duckdb
Connect using ibis.duckdb.connect.

Install the ibis-exasol package:
conda install -c conda-forge ibis-exasol
Connect using ibis.exasol.connect.

Install the ibis-flink package:
conda install -c conda-forge ibis-flink
Connect using ibis.flink.connect.

Install the ibis-impala package:
conda install -c conda-forge ibis-impala
Connect using ibis.impala.connect.

Install the ibis-mssql package:
conda install -c conda-forge ibis-mssql
Connect using ibis.mssql.connect.

Install the ibis-mysql package:
conda install -c conda-forge ibis-mysql
Connect using ibis.mysql.connect.

Install the ibis-oracle package:
conda install -c conda-forge ibis-oracle
Connect using ibis.oracle.connect.

Install the ibis-polars package:
conda install -c conda-forge ibis-polars
Connect using ibis.polars.connect.

Install the ibis-postgres package:
conda install -c conda-forge ibis-postgres
Connect using ibis.postgres.connect.

Install the ibis-pyspark package:
conda install -c conda-forge ibis-pyspark
Connect using ibis.pyspark.connect.

Install the ibis-risingwave package:
conda install -c conda-forge ibis-risingwave
Connect using ibis.risingwave.connect.

Install the ibis-snowflake package:
conda install -c conda-forge ibis-snowflake
Connect using ibis.snowflake.connect.

Install the ibis-sqlite package:
conda install -c conda-forge ibis-sqlite
Connect using ibis.sqlite.connect.

Install the ibis-trino package:
conda install -c conda-forge ibis-trino
Connect using ibis.trino.connect.
Install the ibis-bigquery package:
mamba install -c conda-forge ibis-bigquery
Connect using ibis.bigquery.connect.

Install the ibis-clickhouse package:
mamba install -c conda-forge ibis-clickhouse
Connect using ibis.clickhouse.connect.

Install the ibis-datafusion package:
mamba install -c conda-forge ibis-datafusion
Connect using ibis.datafusion.connect.

Install the ibis-druid package:
mamba install -c conda-forge ibis-druid
Connect using ibis.druid.connect.

Install the ibis-duckdb package:
mamba install -c conda-forge ibis-duckdb
Connect using ibis.duckdb.connect.

Install the ibis-exasol package:
mamba install -c conda-forge ibis-exasol
Connect using ibis.exasol.connect.

Install the ibis-flink package:
mamba install -c conda-forge ibis-flink
Connect using ibis.flink.connect.

Install the ibis-impala package:
mamba install -c conda-forge ibis-impala
Connect using ibis.impala.connect.

Install the ibis-mssql package:
mamba install -c conda-forge ibis-mssql
Connect using ibis.mssql.connect.

Install the ibis-mysql package:
mamba install -c conda-forge ibis-mysql
Connect using ibis.mysql.connect.

Install the ibis-oracle package:
mamba install -c conda-forge ibis-oracle
Connect using ibis.oracle.connect.

Install the ibis-polars package:
mamba install -c conda-forge ibis-polars
Connect using ibis.polars.connect.

Install the ibis-postgres package:
mamba install -c conda-forge ibis-postgres
Connect using ibis.postgres.connect.

Install the ibis-pyspark package:
mamba install -c conda-forge ibis-pyspark
Connect using ibis.pyspark.connect.

Install the ibis-risingwave package:
mamba install -c conda-forge ibis-risingwave
Connect using ibis.risingwave.connect.

Install the ibis-snowflake package:
mamba install -c conda-forge ibis-snowflake
Connect using ibis.snowflake.connect.

Install the ibis-sqlite package:
mamba install -c conda-forge ibis-sqlite
Connect using ibis.sqlite.connect.

Install the ibis-trino package:
mamba install -c conda-forge ibis-trino
Connect using ibis.trino.connect.
See the backend support matrix for details on operations supported. Open a feature request if you’d like to see support for an operation in a given backend. If the backend supports it, we’ll do our best to add it quickly!
Community
Community discussions primarily take place on GitHub and Zulip.
Getting started
If you’re interested in trying Ibis we recommend the getting started tutorial.