Friday, August 29, 2025

C# connection string for Databricks

 

C# connection string for Databricks

Console application program:

connection string will be:

Driver={Simba Spark ODBC Driver};Host=adb-****.azuredatabricks.net;Port=443;ThriftTransport=2;AuthMech=3;UID=token;PWD=yourgenerated_tokenhere;HTTPPath=/sql/1.0/warehouses/******;SSL=1 


Code here

internal class Program
{
    static void Main(string[] args)
    {
        ExecuteQuery();
    }

    /// <summary>
    /// Connects to a Databricks SQL warehouse over ODBC and runs a sample query.
    /// Requires the Simba Spark ODBC driver to be installed on the machine.
    /// </summary>
    public static void ExecuteQuery()
    {
        OdbcConnectionStringBuilder builder = new OdbcConnectionStringBuilder
        {
            Driver = "{Simba Spark ODBC Driver}" // Exact name of the installed Databricks ODBC driver
        };
        builder.Add("Host", "adb-******.azuredatabricks.net");
        builder.Add("Port", "443");          // Standard port for HTTPS
        builder.Add("ThriftTransport", "2"); // 2 = HTTP transport
        builder.Add("AuthMech", "3");        // 3 = Personal Access Token authentication
        builder.Add("UID", "token");         // Literal user name "token" when authenticating with a PAT
        // NOTE(review): never commit a real token to source control; read it from an
        // environment variable or a secret store instead of hard-coding it here.
        builder.Add("PWD", "<90 days token heredapi fsflkdf>");
        builder.Add("HTTPPath", "/sql/1.0/warehouses/*****");
        builder.Add("SSL", "1");             // Enable SSL/TLS

        using (OdbcConnection connection = new OdbcConnection(builder.ConnectionString))
        {
            connection.Open();

            // Databricks SQL has no "TOP" clause (that is SQL Server syntax) — use LIMIT.
            // The filter value is passed as an ODBC '?' parameter instead of being
            // concatenated into the SQL text.
            string qry = "SELECT * FROM ud_employee.text_data WHERE emp_id = ? LIMIT 100";

            using (OdbcCommand command = new OdbcCommand(qry, connection))
            {
                command.Parameters.AddWithValue("@emp_id", "123");

                using (OdbcDataAdapter da = new OdbcDataAdapter(command))
                {
                    DataSet dataSet = new DataSet();
                    da.Fill(dataSet);
                    Console.WriteLine($"Rows returned: {dataSet.Tables[0].Rows.Count}");
                }
            }
        }
    }
}


Thursday, June 19, 2025

Databricks: Python

 

Creating a DataFrame (similar to a data table) and inserting data.

from pyspark.sql.types import (
    StructType,
    StructField,
    StringType,
    IntegerType,
    FloatType,
    DoubleType,
    LongType,
    TimestampType,
)
from pyspark.sql.functions import col  # was missing: col() is used in the filter below

# Sample rows: (id, name, age, DateExecute).
dta12 = [
    (1, "Ganesha", 30, "2021-04-02"),
    (2, "Krishna", 34, "2023-06-01"),
    (3, "Pooja", 53, "2021-01-31"),
    (4, "Archana", 56, "2021-01-28"),
]

# Schema for the rows above. The age values are Python ints, so the field must be
# IntegerType — declaring it StringType makes createDataFrame fail schema
# verification ("StringType can not accept object 30 in type <class 'int'>").
sch1 = StructType(
    [
        StructField("id", IntegerType(), True),
        StructField("name", StringType(), True),
        StructField("age", IntegerType(), True),
        StructField("DateExecute", StringType(), True),  # date kept as a plain string here
    ]
)

df1 = spark.createDataFrame(dta12, sch1)
display(df1)

# id is an integer column, so compare against an int, not the string "4".
df2 = df1.filter(col("id") == 4)
# df2 = df1.filter(col("id") == 4).select("name")
display(df2)



Table creation in Databricks, and adding new rows: 1) via a SQL query, 2) via a DataFrame.
from pyspark.sql.types import StructType, StructField, StringType, IntegerType

### load the existing Databricks table into a DataFrame
frm1 = spark.sql("SELECT * FROM dev.databricks_training.ashwini_test")
display(frm1)

### schema for the rows to append
sch2 = StructType(
    [
        StructField("empid1", IntegerType()),
        StructField("empname", StringType()),
        StructField("region", StringType()),
    ]
)

### creating new rows to append to the existing DataFrame
frm2 = spark.createDataFrame(
    [(123, 'Speaker', 'AMIND'), (124, 'Mouse', 'AMIND'), (125, 'Screen', 'AMIND')],
    sch2,
)
display(frm2)

### adding new rows by union
# NOTE(review): union() matches columns by position, not by name — this assumes
# frm1's column order matches sch2 (empid1, empname, region); verify the table schema.
frm1 = frm1.union(frm2)
display(frm1)

### saving back to the Databricks table (plain string — nothing to interpolate)
frm1.write.mode("overwrite").saveAsTable("dev.databricks_training.ashwini_test")

### filtering rows
# display(frm1.where(frm1["region"] == "amna"))

### inserting a record into the table by executing a SQL query
# spark.sql("insert into dev.databricks_training.ashwini_test values (19037,'Sri Kumar','HKT')")