diff --git a/init/jupyter/gravitino-fileset-example.ipynb b/init/jupyter/gravitino-fileset-example.ipynb
index 41e7256f..b2a89bef 100644
--- a/init/jupyter/gravitino-fileset-example.ipynb
+++ b/init/jupyter/gravitino-fileset-example.ipynb
@@ -53,8 +53,7 @@
     "\n",
     "# Create metalake via Gravitino admin client\n",
     "metalake_name=\"default\"\n",
-    "metalake_ident=NameIdentifier.of(metalake_name)\n",
-    "metalake = gravitino_admin_client.create_metalake(ident=metalake_ident,\n",
+    "metalake = gravitino_admin_client.create_metalake(name=metalake_name,\n",
     " comment=\"metalake comment\", \n",
     " properties={})\n",
     "print(metalake)"
@@ -95,9 +94,8 @@
    "source": [
     "# Create catalog via Gravition client\n",
     "catalog_name=\"catalog\"\n",
-    "catalog_ident=NameIdentifier.of_catalog(metalake_name, catalog_name)\n",
     "\n",
-    "catalog = gravitino_client.create_catalog(ident=catalog_ident,\n",
+    "catalog = gravitino_client.create_catalog(name=catalog_name,\n",
     " type=Catalog.Type.FILESET,\n",
     " provider=\"hadoop\", \n",
     " comment=\"\",\n",
@@ -113,7 +111,7 @@
    "outputs": [],
    "source": [
     "# Load catalog entity via Gravition client\n",
-    "catalog = gravitino_client.load_catalog(ident=catalog_ident)\n",
+    "catalog = gravitino_client.load_catalog(name=catalog_name)\n",
     "print(catalog)"
    ]
   },
@@ -129,8 +127,7 @@
     "schema_path=\"/user/datastrato/\"+schema_name\n",
     "schema_hdfs_path=\"hdfs://hive:9000\"+schema_path\n",
     "\n",
-    "schema_ident: NameIdentifier = NameIdentifier.of_schema(metalake_name, catalog_name, schema_name)\n",
-    "catalog.as_schemas().create_schema(ident=schema_ident, \n",
+    "catalog.as_schemas().create_schema(schema_name=schema_name, \n",
     " comment=\"\", \n",
     " properties={\"location\":schema_hdfs_path})\n",
     "\n",
@@ -155,7 +152,7 @@
     "managed_fileset_path=\"/user/datastrato/\"+schema_name+\"/\"+managed_fileset_name\n",
     "managed_fileset_hdfs_path=\"hdfs://hive:9000\"+managed_fileset_path\n",
     "\n",
-    "managed_fileset_ident: NameIdentifier = NameIdentifier.of_fileset(metalake_name, catalog_name, schema_name, managed_fileset_name)\n",
+    "managed_fileset_ident: NameIdentifier = NameIdentifier.of(schema_name, managed_fileset_name)\n",
     "catalog.as_fileset_catalog().create_fileset(ident=managed_fileset_ident,\n",
     " type=Fileset.Type.MANAGED,\n",
     " comment=\"\",\n",
@@ -187,12 +184,12 @@
     "try:\n",
     " info = hdfs_client.status(external_fileset_path)\n",
     " print(f\"Success: The storage location {external_fileset_path} was successfully created.\")\n",
-    " print(\"Details:\", info) # print hdfs path detail informations\n",
+    " print(\"Details:\", info) # print hdfs path detail information\n",
     "except Exception:\n",
     " print(f\"Faild: The storage location {external_fileset_path} was not successfully created.\")\n",
     "\n",
-    "# Create a external type of fileset\n",
-    "external_fileset_ident: NameIdentifier = NameIdentifier.of_fileset(metalake_name, catalog_name, schema_name, external_fileset_name)\n",
+    "# Create an external type of fileset\n",
+    "external_fileset_ident: NameIdentifier = NameIdentifier.of(schema_name, external_fileset_name)\n",
     "catalog.as_fileset_catalog().create_fileset(ident=external_fileset_ident,\n",
     " type=Fileset.Type.EXTERNAL,\n",
     " comment=\"\",\n",
@@ -208,7 +205,7 @@
    "outputs": [],
    "source": [
     "# List all fileset\n",
-    "catalog = gravitino_client.load_catalog(ident=catalog_ident)\n",
+    "catalog = gravitino_client.load_catalog(name=catalog_name)\n",
     "fileset_list: List[NameIdentifier] = catalog.as_fileset_catalog().list_filesets(namespace=managed_fileset_ident.namespace())\n",
     "print(fileset_list)"
    ]
@@ -221,7 +218,7 @@
    "outputs": [],
    "source": [
     "# Load managed fileset\n",
-    "managed_fileset=gravitino_client.load_catalog(ident=catalog_ident).as_fileset_catalog().load_fileset(ident=managed_fileset_ident)\n",
+    "managed_fileset=gravitino_client.load_catalog(name=catalog_name).as_fileset_catalog().load_fileset(ident=managed_fileset_ident)\n",
     "print(managed_fileset)"
    ]
   },
@@ -233,7 +230,7 @@
    "outputs": [],
    "source": [
     "# Load external fileset\n",
-    "external_fileset=gravitino_client.load_catalog(ident=catalog_ident).as_fileset_catalog().load_fileset(ident=external_fileset_ident)\n",
+    "external_fileset=gravitino_client.load_catalog(name=catalog_name).as_fileset_catalog().load_fileset(ident=external_fileset_ident)\n",
     "print(external_fileset)"
    ]
   },
@@ -281,7 +278,7 @@
    "outputs": [],
    "source": [
     "# Drop schema\n",
-    "catalog.as_schemas().drop_schema(ident=schema_ident, cascade=True)\n",
+    "catalog.as_schemas().drop_schema(schema_name=schema_name, cascade=True)\n",
     "\n",
     "# Check schema location if successfully deleted\n",
     "try:\n",
@@ -299,7 +296,7 @@
    "outputs": [],
    "source": [
     "# Drop catalog\n",
-    "result=gravitino_client.drop_catalog(ident=catalog_ident)\n",
+    "result=gravitino_client.drop_catalog(name=catalog_name)\n",
     "print(result)"
    ]
   },
@@ -311,7 +308,7 @@
    "outputs": [],
    "source": [
     "# Drop metalake\n",
-    "result=gravitino_admin_client.drop_metalake(metalake_ident)\n",
+    "result=gravitino_admin_client.drop_metalake(metalake_name)\n",
     "print(result)"
    ]
   }