diff --git a/src/_modules/data_factory_storage_account/data.tf b/src/_modules/data_factory_storage_account/data.tf
new file mode 100644
index 000000000..fb8fda7eb
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/data.tf
@@ -0,0 +1,20 @@
+data "azurerm_storage_account" "source" {
+ name = var.storage_accounts.source.name
+ resource_group_name = var.storage_accounts.source.resource_group_name
+}
+
+data "azurerm_storage_account" "target" {
+ name = var.storage_accounts.target.name
+ resource_group_name = var.storage_accounts.target.resource_group_name
+}
+
+data "azurerm_storage_containers" "this" {
+ count = var.what_to_migrate.blob.enabled && length(var.what_to_migrate.blob.containers) == 0 ? 1 : 0
+ storage_account_id = data.azurerm_storage_account.source.id
+}
+
+data "azapi_resource_list" "tables" {
+ type = "Microsoft.Storage/storageAccounts/tableServices/tables@2021-09-01"
+ parent_id = "${data.azurerm_storage_account.source.id}/tableServices/default"
+ response_export_values = ["*"]
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/datasets_containers.tf b/src/_modules/data_factory_storage_account/datasets_containers.tf
new file mode 100644
index 000000000..6e4080cc6
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/datasets_containers.tf
@@ -0,0 +1,41 @@
+resource "azurerm_data_factory_custom_dataset" "source_dataset_container" {
+ for_each = toset(local.containers)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ type = "AzureBlob"
+ folder = "${var.storage_accounts.source.name}/source/blob"
+
+ linked_service {
+ name = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_blob[0].name
+ }
+
+ type_properties_json = jsonencode({
+ linkedServiceName = {
+ referenceName = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_blob[0].name
+ type = "LinkedServiceReference"
+ }
+ type = "AzureBlob"
+ folderPath = each.value
+ })
+}
+
+resource "azurerm_data_factory_custom_dataset" "target_dataset_container" {
+ for_each = toset(local.containers)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ type = "AzureBlob"
+ folder = "${var.storage_accounts.source.name}/target/blob"
+
+ linked_service {
+ name = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob[0].name
+ }
+
+ type_properties_json = jsonencode({
+ linkedServiceName = {
+ referenceName = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob[0].name
+ type = "LinkedServiceReference"
+ }
+ type = "AzureBlob"
+ folderPath = each.value
+ })
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/datasets_tables.tf b/src/_modules/data_factory_storage_account/datasets_tables.tf
new file mode 100644
index 000000000..17a0dd8c3
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/datasets_tables.tf
@@ -0,0 +1,30 @@
+resource "azurerm_data_factory_custom_dataset" "source_dataset_table" {
+ for_each = toset(local.tables)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ type = "AzureTable"
+ folder = "${var.storage_accounts.source.name}/source/table"
+
+ linked_service {
+ name = azurerm_data_factory_linked_service_azure_table_storage.source_linked_service_table[0].name
+ }
+
+ type_properties_json = jsonencode({
+ tableName = each.value
+ })
+}
+
+resource "azurerm_data_factory_custom_dataset" "target_dataset_table" {
+ for_each = toset(local.tables)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ type = "AzureTable"
+ folder = "${var.storage_accounts.source.name}/target/table"
+ linked_service {
+ name = azurerm_data_factory_linked_service_azure_table_storage.target_linked_service_table[0].name
+ }
+
+ type_properties_json = jsonencode({
+ tableName = each.value
+ })
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/iam.tf b/src/_modules/data_factory_storage_account/iam.tf
new file mode 100644
index 000000000..6f68a6c00
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/iam.tf
@@ -0,0 +1,40 @@
+module "roles" {
+ source = "github.com/pagopa/dx//infra/modules/azure_role_assignments?ref=main"
+ principal_id = var.data_factory_principal_id
+
+ storage_blob = var.what_to_migrate.blob.enabled ? [
+ {
+ storage_account_name = var.storage_accounts.source.name
+ resource_group_name = var.storage_accounts.source.resource_group_name
+ role = "reader"
+ },
+ {
+ storage_account_name = var.storage_accounts.target.name
+ resource_group_name = var.storage_accounts.target.resource_group_name
+ role = "writer"
+ }
+ ] : []
+
+ # The ADF Terraform resources still force the use of connection strings for tables,
+ # but it is possible to switch to managed identities from the Azure portal
+ storage_table = var.what_to_migrate.table.enabled ? [
+ {
+ storage_account_name = var.storage_accounts.source.name
+ resource_group_name = var.storage_accounts.source.resource_group_name
+ role = "reader"
+ },
+ {
+ storage_account_name = var.storage_accounts.target.name
+ resource_group_name = var.storage_accounts.target.resource_group_name
+ role = "writer"
+ }
+ ] : []
+}
+
+# Permission needed to allow Data Factory to create tables in the target storage account
+resource "azurerm_role_assignment" "storage_account_contributor" {
+ count = var.what_to_migrate.table.enabled ? 1 : 0
+ scope = data.azurerm_storage_account.target.id
+ role_definition_name = "Storage Account Contributor"
+ principal_id = var.data_factory_principal_id
+}
diff --git a/src/_modules/data_factory_storage_account/linked_services_containers.tf b/src/_modules/data_factory_storage_account/linked_services_containers.tf
new file mode 100644
index 000000000..3a16233e4
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/linked_services_containers.tf
@@ -0,0 +1,19 @@
+resource "azurerm_data_factory_linked_service_azure_blob_storage" "source_linked_service_blob" {
+ count = var.what_to_migrate.blob.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-blob-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+
+ service_endpoint = "https://${data.azurerm_storage_account.source.name}.blob.core.windows.net"
+
+ use_managed_identity = true
+}
+
+resource "azurerm_data_factory_linked_service_azure_blob_storage" "target_linked_service_blob" {
+ count = var.what_to_migrate.blob.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-blob-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+
+ service_endpoint = "https://${data.azurerm_storage_account.target.name}.blob.core.windows.net"
+
+ use_managed_identity = true
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/linked_services_tables.tf b/src/_modules/data_factory_storage_account/linked_services_tables.tf
new file mode 100644
index 000000000..c343b3a71
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/linked_services_tables.tf
@@ -0,0 +1,15 @@
+resource "azurerm_data_factory_linked_service_azure_table_storage" "source_linked_service_table" {
+ count = var.what_to_migrate.table.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-table-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+
+ connection_string = data.azurerm_storage_account.source.primary_connection_string
+}
+
+resource "azurerm_data_factory_linked_service_azure_table_storage" "target_linked_service_table" {
+ count = var.what_to_migrate.table.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-table-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+
+ connection_string = data.azurerm_storage_account.target.primary_connection_string
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/locals.tf b/src/_modules/data_factory_storage_account/locals.tf
new file mode 100644
index 000000000..2ec34c586
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/locals.tf
@@ -0,0 +1,6 @@
+locals {
+ containers = var.what_to_migrate.blob.enabled ? (length(var.what_to_migrate.blob.containers) > 0 ? var.what_to_migrate.blob.containers : [for container in data.azurerm_storage_containers.this[0].containers : container.name]) : []
+
+ azapi_tables = jsondecode(data.azapi_resource_list.tables.output)
+ tables = var.what_to_migrate.table.enabled ? (length(var.what_to_migrate.table.tables) > 0 ? var.what_to_migrate.table.tables : [for table in local.azapi_tables.value : table.name]) : []
+}
\ No newline at end of file
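
Note on the table fallback above: data.azapi_resource_list.tables.output is a JSON string, and the list response is assumed to expose the tables under a top-level value array, which is why the for expression iterates local.azapi_tables.value and collects each name. A minimal illustrative sketch of that assumed shape (table names are placeholders):

  locals {
    # Illustrative only: expected shape of jsondecode(data.azapi_resource_list.tables.output)
    azapi_tables_example = {
      value = [
        { name = "table1" },
        { name = "table2" },
      ]
    }
    # [for table in local.azapi_tables_example.value : table.name] => ["table1", "table2"]
  }
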
diff --git a/src/_modules/data_factory_storage_account/main.tf b/src/_modules/data_factory_storage_account/main.tf
new file mode 100644
index 000000000..70b9d5218
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/main.tf
@@ -0,0 +1,21 @@
+terraform {
+ required_providers {
+ azapi = {
+ source = "Azure/azapi"
+ version = "<= 1.15.0"
+ }
+ }
+}
+
+module "naming_convention" {
+ source = "github.com/pagopa/dx//infra/modules/azure_naming_convention/?ref=main"
+
+ environment = {
+ prefix = var.environment.prefix
+ env_short = var.environment.env_short
+ location = var.environment.location
+ domain = var.environment.domain
+ app_name = var.environment.app_name
+ instance_number = var.environment.instance_number
+ }
+}
diff --git a/src/_modules/data_factory_storage_account/network.tf b/src/_modules/data_factory_storage_account/network.tf
new file mode 100644
index 000000000..fffdc0751
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/network.tf
@@ -0,0 +1,31 @@
+resource "azurerm_data_factory_managed_private_endpoint" "blob_source" {
+ count = var.what_to_migrate.blob.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-blob-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+ target_resource_id = data.azurerm_storage_account.source.id
+ subresource_name = "blob"
+}
+
+resource "azurerm_data_factory_managed_private_endpoint" "blob_target" {
+ count = var.what_to_migrate.blob.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-blob-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+ target_resource_id = data.azurerm_storage_account.target.id
+ subresource_name = "blob"
+}
+
+resource "azurerm_data_factory_managed_private_endpoint" "table_source" {
+ count = var.what_to_migrate.table.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-table-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+ target_resource_id = data.azurerm_storage_account.source.id
+ subresource_name = "table"
+}
+
+resource "azurerm_data_factory_managed_private_endpoint" "table_target" {
+ count = var.what_to_migrate.table.enabled ? 1 : 0
+ name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-table-${module.naming_convention.suffix}"
+ data_factory_id = var.data_factory_id
+ target_resource_id = data.azurerm_storage_account.target.id
+ subresource_name = "table"
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/outputs.tf b/src/_modules/data_factory_storage_account/outputs.tf
new file mode 100644
index 000000000..2153f8777
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/outputs.tf
@@ -0,0 +1,10 @@
+output "pipelines" {
+ value = {
+ for pipeline in merge(azurerm_data_factory_pipeline.pipeline_container, azurerm_data_factory_pipeline.pipeline_table)
+ : pipeline.name => {
+ id = pipeline.id
+ name = pipeline.name
+ url = "https://adf.azure.com/en/authoring/pipeline/${pipeline.name}?factory=${pipeline.data_factory_id}"
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/pipeline_containers.tf b/src/_modules/data_factory_storage_account/pipeline_containers.tf
new file mode 100644
index 000000000..7828c686f
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/pipeline_containers.tf
@@ -0,0 +1,60 @@
+resource "azurerm_data_factory_pipeline" "pipeline_container" {
+ for_each = toset(local.containers)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ folder = "${var.storage_accounts.source.name}/blob"
+
+ activities_json = jsonencode(
+ [
+ {
+ name = "CopyActivity"
+ type = "Copy"
+ dependsOn = []
+ policy = {
+ timeout = "0.12:00:00"
+ retry = 0
+ retryIntervalInSeconds = 30
+ secureOutput = false
+ secureInput = false
+ }
+ userProperties = []
+ typeProperties = {
+ source = {
+ type = "BinarySource"
+ storeSettings = {
+ type = "AzureBlobStorageReadSettings"
+ recursive = true
+ enablePartitionDiscovery = false
+ wildcardFileName = "*" # Copy all files
+ }
+ formatSettings = {
+ type = ""
+ }
+ }
+ sink = {
+ type = "BinarySink"
+ storeSettings = {
+ type = "AzureBlobStorageWriteSettings"
+ }
+ formatSettings = {
+ type = ""
+ }
+ }
+ enableStaging = false
+ }
+ inputs = [
+ {
+ referenceName = azurerm_data_factory_custom_dataset.source_dataset_container[each.value].name
+ type = "DatasetReference"
+ }
+ ]
+ outputs = [
+ {
+ referenceName = azurerm_data_factory_custom_dataset.target_dataset_container[each.value].name
+ type = "DatasetReference"
+ }
+ ]
+ }
+ ]
+ )
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/pipeline_tables.tf b/src/_modules/data_factory_storage_account/pipeline_tables.tf
new file mode 100644
index 000000000..54f086b77
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/pipeline_tables.tf
@@ -0,0 +1,58 @@
+resource "azurerm_data_factory_pipeline" "pipeline_table" {
+ for_each = toset(local.tables)
+ name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
+ data_factory_id = var.data_factory_id
+ folder = "${var.storage_accounts.source.name}/table"
+
+ activities_json = jsonencode(
+ [
+ {
+ name = "CopyActivity"
+ type = "Copy"
+ dependsOn = []
+ policy = {
+ timeout = "0.12:00:00"
+ retry = 0
+ retryIntervalInSeconds = 30
+ secureOutput = false
+ secureInput = false
+ }
+ userProperties = []
+ typeProperties = {
+ source = {
+ type = "AzureTableSource"
+ azureTableSourceIgnoreTableNotFound = false
+ }
+ sink = {
+ type = "AzureTableSink"
+ writeBatchSize = 10000
+ writeBatchTimeout = "00:02:00"
+ azureTableInsertType = "merge",
+ azureTablePartitionKeyName = {
+ value = "PartitionKey",
+ type = "Expression"
+ },
+ azureTableRowKeyName = {
+ value = "RowKey",
+ type = "Expression"
+ },
+ }
+ enableStaging = false
+ }
+ inputs = [
+ {
+ referenceName = azurerm_data_factory_custom_dataset.source_dataset_table[each.value].name
+ type = "DatasetReference"
+ }
+ ]
+ outputs = [
+ {
+ referenceName = azurerm_data_factory_custom_dataset.target_dataset_table[each.value].name
+ type = "DatasetReference"
+ }
+ ]
+ }
+ ]
+ )
+
+}
\ No newline at end of file
diff --git a/src/_modules/data_factory_storage_account/variables.tf b/src/_modules/data_factory_storage_account/variables.tf
new file mode 100644
index 000000000..dcdc4210a
--- /dev/null
+++ b/src/_modules/data_factory_storage_account/variables.tf
@@ -0,0 +1,63 @@
+variable "environment" {
+ type = object({
+ prefix = string
+ env_short = string
+ location = string
+ domain = optional(string)
+ app_name = string
+ instance_number = string
+ })
+
+ description = "Values which are used to generate resource names and location short names. They are all mandatory except for domain, which should not be used only in the case of a resource used by multiple domains."
+}
+
+variable "data_factory_id" {
+ description = "Data Factory id where to create resources."
+ type = string
+}
+
+variable "data_factory_principal_id" {
+ description = "Data Factory principal id to grant access to."
+ type = string
+}
+
+variable "storage_accounts" {
+ type = object({
+ source = object({
+ name = string
+ resource_group_name = string
+ })
+
+ target = object({
+ name = string
+ resource_group_name = string
+ })
+ })
+}
+
+variable "what_to_migrate" {
+ type = object({
+ blob = optional(object(
+ {
+ enabled = optional(bool, true)
+ containers = optional(list(string), [])
+ }),
+ { enabled = true, containers = [] }
+ )
+ table = optional(object(
+ {
+ enabled = optional(bool, true)
+ tables = optional(list(string), [])
+ }),
+ { enabled = true, tables = [] }
+ )
+ })
+
+ # Validate that at least one of blob and table is enabled
+ validation {
+ condition = anytrue([var.what_to_migrate.blob.enabled, var.what_to_migrate.table.enabled])
+ error_message = "At least one of blob or table must be enabled."
+ }
+
+ description = "List of storage account containers and tables to migrate."
+}
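
For reference, a minimal sketch of how a caller can populate what_to_migrate (values below are placeholders): an empty containers/tables list falls back to migrating every container/table discovered in the source account, per the lookups in locals.tf.

  what_to_migrate = {
    blob  = { enabled = true, containers = [] }       # empty list: copy every container
    table = { enabled = true, tables = ["t1", "t2"] } # explicit allow-list of tables
  }
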
diff --git a/src/migration/prod/.terraform.lock.hcl b/src/migration/prod/.terraform.lock.hcl
new file mode 100644
index 000000000..54894b893
--- /dev/null
+++ b/src/migration/prod/.terraform.lock.hcl
@@ -0,0 +1,48 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/azure/azapi" {
+ version = "1.15.0"
+ constraints = "<= 1.15.0"
+ hashes = [
+ "h1:5aoSqVISTygtAD42asvbglV/bMqjMvTA2RmuuPz87Ic=",
+ "h1:W41dtPI1BFKkDtLbKWLxGJ4L5ntAFZ5BJYZT+04+jk4=",
+ "h1:Y7ruMuPh8UJRTRl4rm+cdpGtmURx2taqiuqfYaH3o48=",
+ "h1:pO/phGY+TxMEKQ+ffYj+vUIvG5A1tno/sZYDb/yyA/w=",
+ "zh:0627a8bc77254debc25dc0c7b62e055138217c97b03221e593c3c56dc7550671",
+ "zh:2fe045f07070ef75d0bec4b0595a74c14394daa838ddb964e2fd23cc98c40c34",
+ "zh:343009f39c957883b2c06145a5954e524c70f93585f943f1ea3d28ef6995d0d0",
+ "zh:53fe9ab54485aaebc9b91e27a10bce2729a1c95b1399079e631dc6bb9e3f27dc",
+ "zh:63c407e7dc04d178d4798c17ad489d9cc92f7d1941d7f4a3f560b95908b6107b",
+ "zh:7d6fc2b432b264f036bb80ab2b2ba67f80a5d98da8a8c322aa097833dad598c9",
+ "zh:7ec49c0a8799d469eb6e2a1f856693f9862f1b73f5ed70adc1b346e5a4c6458d",
+ "zh:889704f10319d301d677539d788fc82a7c73608ab78cb93e1280ac2be39e6e00",
+ "zh:90b4b07405b7cde9ebae3b034cb5bb5dd18484d1b95bd250f905451f1e86ac3f",
+ "zh:92aa9c241a8cb2a6d81ad47bc007c119f8b818464a960ebaf39008766c361e6b",
+ "zh:f28fbd0a2c59e239b53067bc1adc691be444876bcb2d4f78d310f549724da6e0",
+ "zh:ffb15e0ddfa505d0e9b75341570199076ae574887124f398162b1ead9376b25f",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/azurerm" {
+ version = "3.116.0"
+ constraints = "~> 3.106, <= 3.116.0"
+ hashes = [
+ "h1:2QbjtN4oMXzdA++Nvrj/wSmWZTPgXKOSFGGQCLEMrb4=",
+ "h1:BCR3NIorFSvGG3v/+JOiiw3VM4PkChLO4m84wzD9NDo=",
+ "h1:SJM/KQDW9blKFmLMaupsZVYtcZ0fYpjLHEriMgCBGCY=",
+ "h1:jwwbQ09fH1RdcNsknt1AkvfSUbULsl7nZQn6S8fabFI=",
+ "zh:02b6606aff025fc2a962b3e568e000300abe959adac987183c24dac8eb057f4d",
+ "zh:2a23a8ce24ff9e885925ffee0c3ea7eadba7a702541d05869275778aa47bdea7",
+ "zh:57d10746384baeca4d5c56e88872727cdc150f437b8c5e14f0542127f7475e24",
+ "zh:59e3ebde1a2e1e094c671e179f231ead60684390dbf02d2b1b7fe67a228daa1a",
+ "zh:5f1f5c7d09efa2ee8ddf21bd9efbbf8286f6e90047556bef305c062fa0ac5880",
+ "zh:a40646aee3c9907276dab926e6123a8d70b1e56174836d4c59a9992034f88d70",
+ "zh:c21d40461bc5836cf56ad3d93d2fc47f61138574a55e972ad5ff1cb73bab66dc",
+ "zh:c56fb91a5ae66153ba0f737a26da1b3d4f88fdef7d41c63e06c5772d93b26953",
+ "zh:d1e60e85f51d12fc150aeab8e31d3f18f859c32f927f99deb5b74cb1e10087aa",
+ "zh:ed35e727e7d79e687cd3d148f52b442961ede286e7c5b4da1dcd9f0128009466",
+ "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
+ "zh:f6d2a4e7c58f44e7d04a4a9c73f35ed452f412c97c85def68c4b52814cbe03ab",
+ ]
+}
diff --git a/src/migration/prod/README.md b/src/migration/prod/README.md
new file mode 100644
index 000000000..37297372f
--- /dev/null
+++ b/src/migration/prod/README.md
@@ -0,0 +1,41 @@
+# prod
+
+
+## Requirements
+
+| Name | Version |
+|------|---------|
+| [azapi](#requirement\_azapi) | <= 1.15.0 |
+| [azurerm](#requirement\_azurerm) | <= 3.116.0 |
+
+## Providers
+
+| Name | Version |
+|------|---------|
+| [azurerm](#provider\_azurerm) | 3.116.0 |
+
+## Modules
+
+| Name | Source | Version |
+|------|--------|---------|
+| [migrate\_storage\_accounts](#module\_migrate\_storage\_accounts) | ../../_modules/data_factory_storage_account | n/a |
+
+## Resources
+
+| Name | Type |
+|------|------|
+| [azurerm_data_factory.this](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/data_factory) | resource |
+| [azurerm_data_factory_integration_runtime_azure.azure_runtime](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/data_factory_integration_runtime_azure) | resource |
+| [azurerm_resource_group.migration](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/resource_group) | resource |
+
+## Inputs
+
+No inputs.
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| [data\_factory](#output\_data\_factory) | n/a |
+| [data\_factory\_st\_pipelines](#output\_data\_factory\_st\_pipelines) | n/a |
+
diff --git a/src/migration/prod/italynorth.tf b/src/migration/prod/italynorth.tf
new file mode 100644
index 000000000..2d777fb85
--- /dev/null
+++ b/src/migration/prod/italynorth.tf
@@ -0,0 +1,56 @@
+resource "azurerm_resource_group" "migration" {
+ name = "${local.project_itn}-${local.environment.app_name}-rg-${local.environment.instance_number}"
+ location = "italynorth"
+
+ tags = local.tags
+}
+
+# Create the Azure Data Factory instance and
+# enable its system-assigned managed identity for secure access to resources
+resource "azurerm_data_factory" "this" {
+ name = "${local.project_itn}-${local.environment.app_name}-adf-${local.environment.instance_number}"
+ location = "italynorth"
+ resource_group_name = azurerm_resource_group.migration.name
+
+ public_network_enabled = false
+ managed_virtual_network_enabled = true
+
+ identity {
+ type = "SystemAssigned"
+ }
+
+ tags = local.tags
+}
+
+resource "azurerm_data_factory_integration_runtime_azure" "azure_runtime" {
+ name = "${local.project_itn}-${local.environment.app_name}-adfir-${local.environment.instance_number}"
+ location = "italynorth"
+ data_factory_id = azurerm_data_factory.this.id
+}
+
+module "migrate_storage_accounts" {
+ for_each = { for migration in local.storage_accounts : "${migration.source.name}|${migration.target.name}" => migration }
+ source = "../../_modules/data_factory_storage_account"
+
+ environment = local.environment
+
+ data_factory_id = azurerm_data_factory.this.id
+ data_factory_principal_id = azurerm_data_factory.this.identity[0].principal_id
+
+ storage_accounts = {
+ source = each.value.source
+ target = each.value.target
+ }
+
+ what_to_migrate = {
+ blob = {
+ enabled = try(each.value.blob.enabled, true)
+ containers = try(each.value.blob.containers, [])
+ }
+
+ table = {
+ enabled = try(each.value.table.enabled, true)
+ tables = try(each.value.table.tables, [])
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/migration/prod/locals.tf b/src/migration/prod/locals.tf
new file mode 100644
index 000000000..5cd76e9d7
--- /dev/null
+++ b/src/migration/prod/locals.tf
@@ -0,0 +1,41 @@
+locals {
+ prefix = "io"
+ env_short = "p"
+ location_short = { westeurope = "weu", italynorth = "itn", germanywestcentral = "gwc", northeurope = "neu" }
+ project_itn = "${local.prefix}-${local.env_short}-${local.location_short.italynorth}"
+ project_weu = "${local.prefix}-${local.env_short}-${local.location_short.westeurope}"
+ project_weu_legacy = "${local.prefix}-${local.env_short}"
+ secondary_project = "${local.prefix}-${local.env_short}-${local.location_short.germanywestcentral}"
+
+ environment = {
+ prefix = local.prefix
+ env_short = local.env_short
+ location = "italynorth"
+ app_name = "migration"
+ instance_number = "01"
+ }
+
+ tags = {
+ CostCenter = "TS310 - PAGAMENTI & SERVIZI"
+ CreatedBy = "Terraform"
+ Environment = "Prod"
+ Owner = "IO"
+ Source = "https://github.com/pagopa/io-infra/blob/main/src/migration/prod"
+ }
+
+ storage_accounts = [
+ # Copy both containers and tables
+ # {
+ # source = { name = "stdevbiptest1", resource_group_name = "RG-BIP-DEV-TEST" }
+ # target = { name = "stbipdevtest1", resource_group_name = "dev-fasanorg" }
+ # },
+ #
+ # Copy only selected containers and tables
+ # {
+ # source = { name = "stdevbiptest1", resource_group_name = "RG-BIP-DEV-TEST" }
+ # target = { name = "stbipdevtest1", resource_group_name = "dev-fasanorg" }
+ # blob = {enabled = true, containers = ["c1", "c2", "c3"]}
+ # table = {enabled = true, tables = ["t1", "t2", "t3"]}
+ # }
+ ]
+}
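
For reference, an active entry in storage_accounts would look like the sketch below (account and resource group names are placeholders); italynorth.tf keys each module instance on "<source name>|<target name>", so this entry becomes "stsrcexample001|sttgtexample001".

  storage_accounts = [
    {
      source = { name = "stsrcexample001", resource_group_name = "rg-source-example" }
      target = { name = "sttgtexample001", resource_group_name = "rg-target-example" }
      blob   = { enabled = true, containers = [] } # copy every container
      table  = { enabled = true, tables = ["t1"] } # copy only table t1
    }
  ]
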
diff --git a/src/migration/prod/main.tf b/src/migration/prod/main.tf
new file mode 100644
index 000000000..fcca7f12c
--- /dev/null
+++ b/src/migration/prod/main.tf
@@ -0,0 +1,27 @@
+terraform {
+
+ backend "azurerm" {
+ resource_group_name = "terraform-state-rg"
+ storage_account_name = "iopitntfst001"
+ container_name = "terraform-state"
+ key = "io-infra.migration.prod.italynorth.tfstate"
+ }
+
+ required_providers {
+ azurerm = {
+ source = "hashicorp/azurerm"
+ version = "<= 3.116.0"
+ }
+
+ azapi = {
+ source = "Azure/azapi"
+ version = "<= 1.15.0"
+ }
+ }
+}
+
+provider "azurerm" {
+ features {}
+}
+
+provider "azapi" {}
diff --git a/src/migration/prod/outputs.tf b/src/migration/prod/outputs.tf
new file mode 100644
index 000000000..cc35cc082
--- /dev/null
+++ b/src/migration/prod/outputs.tf
@@ -0,0 +1,11 @@
+output "data_factory" {
+ value = {
+ id = azurerm_data_factory.this.id
+ name = azurerm_data_factory.this.name
+ resource_group_name = azurerm_data_factory.this.resource_group_name
+ }
+}
+
+output "data_factory_st_pipelines" {
+ value = { for migration in local.storage_accounts : "${migration.source.name}|${migration.target.name}" => module.migrate_storage_accounts["${migration.source.name}|${migration.target.name}"].pipelines }
+}
\ No newline at end of file