in SamplesV1/ParameterizedPipelinesForAzureML/DeployDataFactory/DeployDataFactory/CopyPipeline.cs [111:190]
/// <summary>
/// Registers the blob-backed input and output datasets used by the copy
/// pipeline, and returns their names through the out parameters.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the data factory.</param>
/// <param name="dataFactoryName">Name of the target data factory.</param>
/// <param name="client">Authenticated Data Factory management client.</param>
/// <param name="Dataset_Source">Receives the name of the created source dataset.</param>
/// <param name="Dataset_Destination">Receives the name of the created destination dataset.</param>
private static void CreateInputOutputDatasets(
string resourceGroupName,
string dataFactoryName,
DataFactoryManagementClient client,
out string Dataset_Source,
out string Dataset_Destination)
{
    Console.WriteLine("Creating input and output tables");
    Dataset_Source = "DatasetBlobSource";
    Dataset_Destination = "DatasetBlobDestination";

    // Source dataset: a fixed blob (sample/input.txt). Marked External because
    // it is produced outside ADF; the validation policy requires at least one
    // row before a slice is considered ready.
    var sourceDataset = new Dataset()
    {
        Name = Dataset_Source,
        Properties = new DatasetProperties()
        {
            LinkedServiceName = "LinkedService-AzureStorage",
            TypeProperties = new AzureBlobDataset()
            {
                FolderPath = "sample/",
                FileName = "input.txt"
            },
            External = true,
            Availability = new Availability()
            {
                Frequency = SchedulePeriod.Minute,
                Interval = 15,
            },
            Policy = new Policy()
            {
                Validation = new ValidationPolicy()
                {
                    MinimumRows = 1
                }
            }
        }
    };

    // Destination dataset: one output folder per slice, with the {Slice}
    // token expanded from SliceStart using the yyyyMMdd-HHmm format.
    var destinationDataset = new Dataset()
    {
        Name = Dataset_Destination,
        Properties = new DatasetProperties()
        {
            LinkedServiceName = "LinkedService-AzureStorage",
            TypeProperties = new AzureBlobDataset()
            {
                FolderPath = "sample/output/{Slice}",
                PartitionedBy = new Collection<Partition>()
                {
                    new Partition()
                    {
                        Name = "Slice",
                        Value = new DateTimePartitionValue()
                        {
                            Date = "SliceStart",
                            Format = "yyyyMMdd-HHmm"
                        }
                    }
                }
            },
            Availability = new Availability()
            {
                Frequency = SchedulePeriod.Minute,
                Interval = 15,
            },
        }
    };

    // Both datasets share a 15-minute availability schedule so the pipeline
    // slices line up; register them in source-then-destination order.
    client.Datasets.CreateOrUpdate(
        resourceGroupName,
        dataFactoryName,
        new DatasetCreateOrUpdateParameters() { Dataset = sourceDataset });
    client.Datasets.CreateOrUpdate(
        resourceGroupName,
        dataFactoryName,
        new DatasetCreateOrUpdateParameters() { Dataset = destinationDataset });
}