diff --git a/lib/modules/manager/terraform/lockfile/hash.ts b/lib/modules/manager/terraform/lockfile/hash.ts
index 7a4aa15c8cba82..754ddbf83be910 100644
--- a/lib/modules/manager/terraform/lockfile/hash.ts
+++ b/lib/modules/manager/terraform/lockfile/hash.ts
@@ -120,6 +120,7 @@ export class TerraformProviderHash {
     logger.trace(
       `Downloading archive and generating hash for ${build.name}-${build.version}...`,
     );
+    const startTime = Date.now();
     const readStream = TerraformProviderHash.http.stream(build.url);
     const writeStream = fs.createCacheWriteStream(downloadFileName);

@@ -127,9 +128,8 @@ export class TerraformProviderHash {
       await fs.pipeline(readStream, writeStream);

       const hash = await this.hashOfZipContent(downloadFileName, extractPath);
-      logger.trace(
-        { hash },
-        `Generated hash for ${build.name}-${build.version}`,
+      logger.debug(
+        `Hash generation for ${build.url} took ${Date.now() - startTime}ms for ${build.name}-${build.version}`,
       );
       return hash;
     } finally {
@@ -141,6 +141,7 @@ export class TerraformProviderHash {
   static async calculateHashScheme1Hashes(
     builds: TerraformBuild[],
   ): Promise<string[]> {
+    logger.debug(`Calculating hashes for ${builds.length} builds`);
     const cacheDir = await ensureCacheDir('./others/terraform');

     // for each build download ZIP, extract content and generate hash for all containing files
@@ -154,6 +155,9 @@ export class TerraformProviderHash {
     repository: string,
     version: string,
   ): Promise<string[] | null> {
+    logger.debug(
+      `Creating hashes for ${repository}@${version} (${registryURL})`,
+    );
     const builds = await TerraformProviderHash.terraformDatasource.getBuilds(
       registryURL,
       repository,
@@ -169,6 +173,10 @@ export class TerraformProviderHash {
       builds.map((build) => build.shasums_url).filter(isNotNullOrUndefined),
     );

+    logger.debug(
+      `Getting zip hashes for ${shaUrls.length} shasum URL(s) for ${repository}@${version}`,
+    );
+
     const zhHashes: string[] = [];
     for (const shaUrl of shaUrls) {
       const hashes =
@@ -177,6 +185,10 @@
       zhHashes.push(...coerceArray(hashes));
     }

+    logger.debug(
+      `Got ${zhHashes.length} zip hashes for ${repository}@${version}`,
+    );
+
     const h1Hashes =
       await TerraformProviderHash.calculateHashScheme1Hashes(builds);

diff --git a/lib/modules/manager/terraform/lockfile/index.ts b/lib/modules/manager/terraform/lockfile/index.ts
index 9e268f21c3ad76..769dffdc9c8092 100644
--- a/lib/modules/manager/terraform/lockfile/index.ts
+++ b/lib/modules/manager/terraform/lockfile/index.ts
@@ -163,6 +163,7 @@ export async function updateArtifacts({
         // TODO #22198
         ['provider', 'required_provider'].includes(dep.depType!),
       );
+      logger.debug(`Found ${providerDeps.length} provider deps`);
       for (const dep of providerDeps) {
         massageProviderLookupName(dep);
         const { registryUrls, newVersion, packageName } = dep;
@@ -172,6 +173,7 @@
         );
         // istanbul ignore if: needs test
         if (!updateLock) {
+          logger.debug(`Skipping. No lock found for "${packageName}"`);
           continue;
         }
         if (dep.isLockfileUpdate) {
@@ -213,9 +215,10 @@
       updates.length === 0 ||
       updates.some((value) => !value.newHashes?.length)
     ) {
+      logger.debug('No updates found or hash creation failed');
       return null;
     }
-
+    logger.debug(`Writing updates to ${lockFilePath}`);
     const res = writeLockUpdates(updates, lockFilePath, lockFileContent);
     return [res];
   } catch (err) {