Hi @Victor Seifert ,
An alternative way is to leverage the library and upload a custom package.
BigDataPoolsOperationsExtensions.BeginCreateOrUpdate Method (Microsoft.Azure.Management.Synapse) - Azure for .NET Developers | Microsoft Learn
Something like this:
// Custom packages can only be added after the initial spark pool creation, otherwise Azure throws an error.
// NOTE(review): the original condition was `HasCustomPackages && false`, which makes this whole branch
// dead code (likely leftover from debugging). The `&& false` is removed so package deployment actually runs.
if (this.sparkPoolEntity.HasCustomPackages)
{
    // If a requirements file is specified, read it and attach it to the pool definition.
    string requirementsFileContents = null;
    if (!string.IsNullOrEmpty(this.sparkPoolEntity.RequirementsFilePath))
    {
        var requirementsFilePath = PathManager.Instance.GetFilePath(this.sparkPoolEntity.RequirementsFilePath);
        var requirementsFileName = Path.GetFileName(requirementsFilePath);
        requirementsFileContents = File.ReadAllText(requirementsFilePath);
        sparkPoolInfo.LibraryRequirements = new LibraryRequirements(DateTime.UtcNow, requirementsFileContents, requirementsFileName);
    }

    sparkPoolInfo.CustomLibraries = await this.GetPythonLibrariesToDeploy();

    // Compare the desired library set against what is currently deployed on the pool.
    var sparkPool = await resourceManagement.GetSparkPoolAsync(this.sparkPoolEntity.ResourceGroup, this.sparkPoolEntity.Workspacename, this.sparkPoolEntity.SparkPoolName);
    var librariesToUpdate = sparkPoolInfo.CustomLibraries.Select(l => l.Name);
    var existingLibraries = (sparkPool.CustomLibraries ?? new List<LibraryInfo>()).Select(l => l.Name);

    // Order-insensitive set comparison; ordinal ordering keeps it culture-independent
    // (library names are identifiers, not user-facing text).
    var librariesChanged = !librariesToUpdate.OrderBy(e => e, StringComparer.Ordinal)
        .SequenceEqual(existingLibraries.OrderBy(e => e, StringComparer.Ordinal));
    var requirementsChanged = requirementsFileContents != sparkPool.LibraryRequirements?.Content;

    Logger.Instance.LogMessage($"Spark pool {this.sparkPoolEntity.SparkPoolName} libraries changed: {librariesChanged}. Requirements changed: {requirementsChanged}");

    // Skip re-provisioning if there is no change in libraries or requirements.
    if (librariesChanged || requirementsChanged)
    {
        await WaitForProvisioningToComplete();
        Logger.Instance.LogMessage($"Begin re-provisioning of spark pool {this.sparkPoolEntity.SparkPoolName}");

        // Use BeginCreateOrUpdate here so the pipeline deployment isn't blocked on the spark pool provisioning.
        await resourceManagement.BeginCreateOrUpdateSparkPoolAsync(this.sparkPoolEntity.SparkPoolName,
            this.sparkPoolEntity.ResourceGroup,
            this.sparkPoolEntity.Workspacename,
            sparkPoolInfo);
    }
    else
    {
        Logger.Instance.LogMessage($"Skip re-provisioning of spark pool {this.sparkPoolEntity.SparkPoolName}");
    }
}
Hope this helps. Please let us know how it goes.
-----------
Please consider hitting the Accept Answer
button. Accepted answers help the community as well.