![](/img/trans.png)
[英]How to connect ASP NET web app client with NET framework console app server
[英]How to connect a .net console app client to azure data factory
我創建了一個 Azure 數據工廠管道,如下所示:
Azure SQL Database --> CSV Blob storage --> Azure Hyperscale Citus.
但是這個管道是在 azure 數據工廠 UI 上創建的。
現在我想通過控制台應用程序創建這個管道。
1.)但我無法弄清楚如何驗證客戶端並連接到數據工廠以執行管道。
2.) 這是轉換 sql DB 的最佳方式嗎?-> 超大規模
If you want to create an Azure Data Factory pipeline with a C# console application, you can use the package Microsoft.Azure.Management.DataFactory. 更多詳細信息,請參閱官方文檔。
例如(我創建了一個管道將 blob 從一個容器復制到另一個容器)
首先,需要將 Contributor(參與者)角色分配給 service principal(服務主體):
# Select the subscription that the Data Factory lives in.
az account set --subscription "<your subscription id>"
# the sp will have Azure Contributor role
# (the command prints the appId/password/tenant used as applicationId,
#  authenticationKey and tenantID in the C# code below)
az ad sp create-for-rbac -n "readMetric"
# NuGet packages required by the console app:
# Data Factory management SDK (clients, pipeline/dataset/linked-service models).
Install-Package Microsoft.Azure.Management.DataFactory
# ResourceManager SDK; prerelease flag required at the time of writing.
Install-Package Microsoft.Azure.Management.ResourceManager -IncludePrerelease
# ADAL client library, used to acquire the Azure AD token.
Install-Package Microsoft.IdentityModel.Clients.ActiveDirectory
// ---- Authenticate against Azure AD and build a Data Factory client ----
// Service-principal credentials produced by `az ad sp create-for-rbac` above.
string tenantID = "<your tenant ID>";
string applicationId = "<your application ID>";
string authenticationKey = "<your authentication key for the application>";
string subscriptionId = "<your subscription ID where the data factory resides>";
string resourceGroup = "<your resource group where the data factory resides>";
string region = "<the location of your resource group>";
// FIX: original placeholder string was malformed ("<specify the name of data factory ").
string dataFactoryName = "<specify the name of the data factory>";

// Acquire an AAD token for the ARM endpoint.
// FIX: await the call instead of blocking on .Result — blocking on async
// code risks deadlocks and wastes a thread-pool thread.
var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
AuthenticationResult result = await context.AcquireTokenAsync(
    "https://management.azure.com/", cc);
ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);

// Management client scoped to the subscription that owns the factory.
var client = new DataFactoryManagementClient(cred)
{
    SubscriptionId = subscriptionId
};
// ---- Azure Storage linked service ----
// Registers the storage account with the factory so datasets can reference it.
string storageAccount = "<your storage account name to copy data>";
string storageKey = "<your storage account key>";
string storageLinkedServiceName = "AzureStorageLinkedService";

// NOTE: the account key is embedded in the connection string; in production
// prefer Key Vault references over inline secrets.
var storageConnectionString =
    $"DefaultEndpointsProtocol=https;AccountName={storageAccount};AccountKey={storageKey}";

var storageLinkedService = new LinkedServiceResource(
    new AzureStorageLinkedService
    {
        ConnectionString = new SecureString(storageConnectionString)
    });

await client.LinkedServices.CreateOrUpdateAsync(
    resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
// ---- Parameterized blob dataset ----
// One dataset is reused for both source and sink; the blob folder path is
// supplied at run time through the "path" dataset parameter.
string blobDatasetName = "BlobDataset";

var datasetParameters = new Dictionary<string, ParameterSpecification>
{
    ["path"] = new ParameterSpecification { Type = ParameterType.String }
};

var blobDataset = new DatasetResource(
    new AzureBlobDataset
    {
        LinkedServiceName = new LinkedServiceReference
        {
            ReferenceName = storageLinkedServiceName
        },
        // ADF expression: resolves to whatever "path" value the caller passes in.
        FolderPath = new Expression { Value = "@{dataset().path}" },
        Parameters = datasetParameters
    });

await client.Datasets.CreateOrUpdateAsync(
    resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
// ---- Copy pipeline ----
// A single copy activity that reads from one blob folder and writes to
// another; both paths arrive as pipeline parameters.
string pipelineName = "mypipeline";

// Source and sink both point at the same parameterized dataset, bound to
// different pipeline parameters.
var copySource = new DatasetReference
{
    ReferenceName = blobDatasetName,
    Parameters = new Dictionary<string, object>
    {
        ["path"] = "@pipeline().parameters.inputPath"
    }
};

var copySink = new DatasetReference
{
    ReferenceName = blobDatasetName,
    Parameters = new Dictionary<string, object>
    {
        ["path"] = "@pipeline().parameters.outputPath"
    }
};

var copyActivity = new CopyActivity
{
    Name = "CopyFromBlobToBlob",
    Inputs = new List<DatasetReference> { copySource },
    Outputs = new List<DatasetReference> { copySink },
    Source = new BlobSource(),
    Sink = new BlobSink()
};

var pipeline = new PipelineResource
{
    Parameters = new Dictionary<string, ParameterSpecification>
    {
        ["inputPath"] = new ParameterSpecification { Type = ParameterType.String },
        ["outputPath"] = new ParameterSpecification { Type = ParameterType.String }
    },
    Activities = new List<Activity> { copyActivity }
};

await client.Pipelines.CreateOrUpdateAsync(resourceGroup, dataFactoryName, pipelineName, pipeline);
// ---- Trigger a pipeline run ----
// Source container/folder holding the blob(s) to copy.
string inputBlobPath =
    "<path to existing blob(s) to copy data from, e.g. containername/inputdir>";
// Destination container/folder the blob(s) are copied into.
string outputBlobPath =
    "<the blob path to copy data to, e.g. containername/outputdir>";

// Run-time values for the pipeline's inputPath/outputPath parameters.
var runArguments = new Dictionary<string, object>
{
    ["inputPath"] = inputBlobPath,
    ["outputPath"] = outputBlobPath
};

var runResponse = await client.Pipelines.CreateRunWithHttpMessagesAsync(
    resourceGroup, dataFactoryName, pipelineName, parameters: runArguments);
// ---- Poll until the pipeline run reaches a terminal state ----
Console.WriteLine("Checking pipeline run status...");
PipelineRun pipelineRun;
while (true)
{
    pipelineRun = client.PipelineRuns.Get(
        resourceGroup, dataFactoryName, runResponse.Body.RunId);
    Console.WriteLine("Status: " + pipelineRun.Status);

    // Anything other than InProgress/Queued is terminal (Succeeded/Failed/...).
    bool stillRunning =
        pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued";
    if (!stillRunning)
        break;

    // Wait 15 seconds between polls.
    System.Threading.Thread.Sleep(15000);
}
// ---- Inspect the copy activity result ----
Console.WriteLine("Checking copy activity run details...");

// Query activity runs inside a +/-10-minute window around now.
var filterParams = new RunFilterParameters(
    DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
ActivityRunsQueryResponse queryResponse = await client.ActivityRuns.QueryByPipelineRunAsync(
    resourceGroup, dataFactoryName, runResponse.Body.RunId, filterParams);

// Print the copy activity's output on success, its error details otherwise.
var firstActivityRun = queryResponse.Value.First();
if (pipelineRun.Status == "Succeeded")
    Console.WriteLine(firstActivityRun.Output);
else
    Console.WriteLine(firstActivityRun.Error);

Console.WriteLine("\nPress any key to exit...");
Console.ReadKey();
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.