from pyspark.context import SparkContext
from pyspark.sql import SparkSession

# ---- Deployment settings: replace the placeholders before running ----
aws_region = "<your-region>"  # AWS region of the Glue catalogs, e.g. "us-east-1"
aws_account_id = "<your-account-id>"  # 12-digit AWS account id
local_catalogname = "spark_catalog"  # Spark's default session catalog name
warehouse_path = "file:///tmp/spark-warehouse"  # local Iceberg warehouse location

# ---- Redshift federated catalog (Glue multi-catalog) ----
redshift_catalogname = "redshiftcatalog"  # Spark-side catalog alias
# Glue catalog id in "<account-id>:<catalog-name>/<database>" form
redshift_accountid_catalogname = f"{aws_account_id}:redshift_salescatalog/dev"
# NOTE(review): this expands to "arn:aws:glue:<region>:<account>:redshift_salescatalog/dev".
# Glue ARNs normally carry a resource-type segment (e.g. ":catalog/...") — confirm
# this is the format the consumer of `glue.catalog-arn` expects.
redshift_catalog_arn = f"arn:aws:glue:{aws_region}:{redshift_accountid_catalogname}"
redshift_database = "sales"  # database inside the federated catalog
redshift_table = "store_sales"  # table queried below

# ---- S3 Tables bucket catalog ----
s3tables_catalogname = "s3tablesbucket"  # Spark-side catalog alias
# Glue catalog id in "<account-id>:s3tablescatalog/<table-bucket-name>" form
s3tables_account_catalogname = f"{aws_account_id}:s3tablescatalog/tbacblog-customer-bucket"
s3tables_database = "tbacblog_namespace"  # namespace inside the table bucket
s3tables_table = "customer"  # table queried below


# Build a SparkSession exposing three Iceberg catalogs, all backed by AWS Glue:
#   - the default session catalog (SparkSessionCatalog, local warehouse),
#   - an S3 Tables bucket catalog,
#   - a Redshift federated catalog.
# Config entries are collected in a dict and applied in insertion order, which
# reproduces the original chained .config(...) calls exactly.
_spark_conf = {
    'spark.sql.extensions': 'org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions',
    f'spark.sql.catalog.{local_catalogname}': 'org.apache.iceberg.spark.SparkSessionCatalog',
    f'spark.sql.catalog.{local_catalogname}.catalog-impl': 'org.apache.iceberg.aws.glue.GlueCatalog',
    f'spark.sql.catalog.{local_catalogname}.client.region': aws_region,
    f'spark.sql.catalog.{local_catalogname}.glue.account-id': aws_account_id,
    f'spark.sql.catalog.{local_catalogname}.io-impl': 'org.apache.iceberg.aws.s3.S3FileIO',
    f'spark.sql.catalog.{local_catalogname}.warehouse': warehouse_path,
    f'spark.sql.catalog.{s3tables_catalogname}': 'org.apache.iceberg.spark.SparkCatalog',
    f'spark.sql.catalog.{s3tables_catalogname}.catalog-impl': 'org.apache.iceberg.aws.glue.GlueCatalog',
    f'spark.sql.catalog.{s3tables_catalogname}.client.region': aws_region,
    f'spark.sql.catalog.{s3tables_catalogname}.glue.account-id': aws_account_id,
    f'spark.sql.catalog.{s3tables_catalogname}.glue.id': s3tables_account_catalogname,
    f'spark.sql.catalog.{redshift_catalogname}': 'org.apache.iceberg.spark.SparkCatalog',
    f'spark.sql.catalog.{redshift_catalogname}.catalog-impl': 'org.apache.iceberg.aws.glue.GlueCatalog',
    f'spark.sql.catalog.{redshift_catalogname}.glue.id': redshift_accountid_catalogname,
    f'spark.sql.catalog.{redshift_catalogname}.glue.account-id': aws_account_id,
    f'spark.sql.catalog.{redshift_catalogname}.client.region': aws_region,
    f'spark.sql.catalog.{redshift_catalogname}.glue.catalog-arn': redshift_catalog_arn,
}

_builder = SparkSession.builder.appName('tbacblog')
for _conf_key, _conf_value in _spark_conf.items():
    _builder = _builder.config(_conf_key, _conf_value)
spark = _builder.getOrCreate()

# Sanity check: list databases visible through the default session catalog.
# Fix: the original used an f-string with no placeholders (ruff F541).
spark.sql("show databases").show()

# Read from the Redshift federated catalog: enumerate namespaces and tables,
# then select from the federated store_sales table.
spark.sql(f"SHOW NAMESPACES IN {redshift_catalogname}").show()
spark.sql(f"SHOW TABLES IN {redshift_catalogname}.{redshift_database}").show()
spark.sql(f"SELECT * FROM {redshift_catalogname}.{redshift_database}.{redshift_table}").show()

# Read from the S3 Tables bucket catalog: same pattern against the customer table.
spark.sql(f"SHOW NAMESPACES IN {s3tables_catalogname}").show()
spark.sql(f"SHOW TABLES IN {s3tables_catalogname}.{s3tables_database}").show()
spark.sql(f"SELECT * FROM {s3tables_catalogname}.{s3tables_database}.{s3tables_table}").show()
