diff --git a/README.md b/README.md
index fdb5eb4a89..feb61ec8f8 100644
--- a/README.md
+++ b/README.md
@@ -66,8 +66,7 @@ The `deltalake` library aims to adopt patterns from other libraries in data proc
 so getting started should look familiar.
 
 ```py3
-from deltalake import DeltaTable
-from deltalake.write import write_deltalake
+from deltalake import DeltaTable, write_deltalake
 import pandas as pd
 
 # write some data into a delta table
@@ -139,7 +138,7 @@ of features outlined in the Delta [protocol][protocol] is also [tracked](#protoc
 | S3 - R2              | ![done]               | ![done]               | requires lock for concurrent writes |
 | Azure Blob           | ![done]               | ![done]               |                                     |
 | Azure ADLS Gen2      | ![done]               | ![done]               |                                     |
-| Microsoft OneLake    | [![open]][onelake-rs] | [![open]][onelake-rs] |                                     |
+| Microsoft OneLake    | ![done]               | ![done]               |                                     |
 | Google Cloud Storage | ![done]               | ![done]               |                                     |
 
 ### Supported Operations
diff --git a/python/docs/source/usage.rst b/python/docs/source/usage.rst
index 5dc5a0959e..6711e7e777 100644
--- a/python/docs/source/usage.rst
+++ b/python/docs/source/usage.rst
@@ -75,13 +75,16 @@ For Databricks Unity Catalog authentication, use environment variables:
 * DATABRICKS_ACCESS_TOKEN
 
 .. code-block:: python
-
-    >>> from deltalake import DataCatalog, DeltaTable
-    >>> catalog_name = 'main'
-    >>> schema_name = 'db_schema'
-    >>> table_name = 'db_table'
-    >>> data_catalog = DataCatalog.UNITY
-    >>> dt = DeltaTable.from_data_catalog(data_catalog=data_catalog, data_catalog_id=catalog_name, database_name=schema_name, table_name=table_name)
+
+    >>> import os
+    >>> from deltalake import DataCatalog, DeltaTable
+    >>> os.environ['DATABRICKS_WORKSPACE_URL'] = "https://adb-62800498333851.30.azuredatabricks.net"
+    >>> os.environ['DATABRICKS_ACCESS_TOKEN'] = ""
+    >>> catalog_name = 'main'
+    >>> schema_name = 'db_schema'
+    >>> table_name = 'db_table'
+    >>> data_catalog = DataCatalog.UNITY
+    >>> dt = DeltaTable.from_data_catalog(data_catalog=data_catalog, data_catalog_id=catalog_name, database_name=schema_name, table_name=table_name)
 
 .. _`s3 options`: https://docs.rs/object_store/latest/object_store/aws/enum.AmazonS3ConfigKey.html#variants
 .. _`azure options`: https://docs.rs/object_store/latest/object_store/azure/enum.AzureConfigKey.html#variants
@@ -458,7 +461,7 @@ DataFrame, a PyArrow Table, or an iterator of PyArrow Record Batches.
 
 .. code-block:: python
 
-    >>> from deltalake.writer import write_deltalake
+    >>> from deltalake import write_deltalake
     >>> df = pd.DataFrame({'x': [1, 2, 3]})
     >>> write_deltalake('path/to/table', df)
 
@@ -492,7 +495,7 @@ the method will raise an error.
 
 .. code-block:: python
 
-    >>> from deltalake.writer import write_deltalake
+    >>> from deltalake import write_deltalake
     >>> df = pd.DataFrame({'x': [1, 2, 3], 'y': ['a', 'a', 'b']})
     >>> write_deltalake('path/to/table', df, partition_by=['y'])
 
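
For reference, a minimal end-to-end sketch of the consolidated top-level import these hunks document, combining the README write snippet with the partitioned write from usage.rst. The table path `tmp/some-table` is a hypothetical local path chosen for illustration:

```python
# Top-level imports as standardized by this change; assumes the
# `deltalake` and `pandas` packages are installed.
from deltalake import DeltaTable, write_deltalake
import pandas as pd

# Write some data into a delta table, partitioned by column 'y'
# (mirrors the usage.rst example).
df = pd.DataFrame({'x': [1, 2, 3], 'y': ['a', 'a', 'b']})
write_deltalake('tmp/some-table', df, partition_by=['y'])

# Read the table back as a pandas DataFrame.
dt = DeltaTable('tmp/some-table')
print(dt.to_pandas())
```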