Great video! Could you also please make a video on sending a JSON file from an Android app (fetching the file from the local Downloads folder on Android) to the OneLake root folder or any other folder? I have tried but it's not working.
Hi @WafaStudies, I am getting "sampleLakehouse.Lakehouse is not found in the Workspace". Why? I have rechecked the workspace and lakehouse names, and all of them are correct.
# If you already know how to create a service principal, use this code.
import os

from azure.identity import ClientSecretCredential
from azure.storage.filedatalake import (
    DataLakeServiceClient,
    DataLakeDirectoryClient,
)

# Set your account, workspace, and item path here.
ACCOUNT_NAME = "onelake"  # OneLake is exposed under the fixed "onelake" account
WORKSPACE_NAME = ""  # Fabric workspace name (acts as the filesystem/container)
DATA_PATH = ""  # path within the workspace, e.g. "myLakehouse.Lakehouse/Files/folder"
LOCAL_FILE_PATH = r""  # absolute path of the local file to upload


def upload_file_to_directory(directory_client: DataLakeDirectoryClient, local_file_path: str) -> None:
    """Upload one local file into the given OneLake directory.

    The remote file keeps the local file's base name; an existing file
    with the same name is overwritten.
    """
    file_name = os.path.basename(local_file_path)
    file_client = directory_client.get_file_client(file_name)
    with open(local_file_path, mode="rb") as data:
        file_client.upload_data(data, overwrite=True)


def main() -> None:
    """Authenticate with a service principal and upload LOCAL_FILE_PATH to OneLake."""
    # Service principal credentials — fill these in. NOTE(review): for anything
    # beyond a demo, load these from environment variables or a secret store
    # instead of hard-coding them in source.
    client_id = ""
    tenant_id = ""
    client_secret = ""

    # BUG FIX: the account URL must include the https:// scheme — the SDK does
    # not accept a bare hostname, which made the original script fail.
    account_url = f"https://{ACCOUNT_NAME}.dfs.fabric.microsoft.com"
    credential = ClientSecretCredential(tenant_id, client_id, client_secret)
    service_client = DataLakeServiceClient(account_url, credential=credential)

    # In OneLake the workspace plays the role of the filesystem (container).
    file_system_client = service_client.get_file_system_client(WORKSPACE_NAME)
    directory_client = file_system_client.get_directory_client(DATA_PATH)
    upload_file_to_directory(directory_client, LOCAL_FILE_PATH)


if __name__ == "__main__":
    main()
Great video! Could you also please make a video on sending a JSON file from an Android app (fetching the file from the local Downloads folder on Android) to the OneLake root folder or any other folder? I have tried but it's not working.
Hi @WafaStudies, I am getting "sampleLakehouse.Lakehouse is not found in the Workspace". Why? I have rechecked the workspace and lakehouse names, and all of them are correct.
what an explanation 👏 👌 Loved it.
For live project, can we use SPN for auth?
Yes you can
I am not able to install the packages.
How to use a service principal instead of logging interactively?
import os
from azure.identity import ClientSecretCredential
from azure.storage.filedatalake import (
DataLakeServiceClient,
DataLakeDirectoryClient,
)
# Set your account, workspace, and item path here
ACCOUNT_NAME = "onelake"  # OneLake endpoint account segment — the script builds "<ACCOUNT_NAME>.dfs.fabric.microsoft.com" from this
WORKSPACE_NAME = ""  # Fabric workspace name; passed to get_file_system_client, so it acts as the filesystem/container
DATA_PATH = ""  # directory path inside the workspace — presumably "<item>.Lakehouse/Files/<folder>"; verify against your workspace
LOCAL_FILE_PATH = r""  # path of the local file to upload (raw string so Windows backslashes are safe)
def upload_file_to_directory(directory_client: DataLakeDirectoryClient, local_file_path: str):
    """Upload a single local file into the given Data Lake directory.

    The uploaded file keeps the local file's base name, and any existing
    remote file with that name is overwritten.
    """
    target_name = os.path.basename(local_file_path)
    target_file = directory_client.get_file_client(target_name)
    with open(local_file_path, mode="rb") as source:
        target_file.upload_data(source, overwrite=True)
def main():
    """Authenticate with a service principal and upload LOCAL_FILE_PATH to OneLake.

    Builds a DataLakeServiceClient against the OneLake DFS endpoint, resolves
    the workspace as the filesystem and DATA_PATH as the target directory,
    then delegates the upload to upload_file_to_directory.
    """
    # Create a service client using service principal credentials.
    # NOTE(review): fill these in — and prefer environment variables or a
    # secret store over hard-coding secrets in source for real projects.
    client_id = ""
    tenant_id = ""
    client_secret = ""

    # BUG FIX: the account URL must include the https:// scheme; the SDK does
    # not accept a bare hostname such as "onelake.dfs.fabric.microsoft.com".
    account_url = f"https://{ACCOUNT_NAME}.dfs.fabric.microsoft.com"
    credential = ClientSecretCredential(tenant_id, client_id, client_secret)
    service_client = DataLakeServiceClient(account_url, credential=credential)

    # Create a file system client for the workspace — in OneLake the workspace
    # plays the role of the filesystem (container).
    file_system_client = service_client.get_file_system_client(WORKSPACE_NAME)

    # Get the directory client for the specified data path (relative to the workspace).
    directory_client = file_system_client.get_directory_client(DATA_PATH)

    # Upload the local file to the specified directory in the Data Lake storage.
    upload_file_to_directory(directory_client, LOCAL_FILE_PATH)


if __name__ == "__main__":
    main()
If you already know how to create a service principal, use this code.
This is for loading a data from local computer to Lakehouse using Python
how can we fetch the data from onelake, pls post a video on that
when will you add more videos ??