diff --git a/src/core/file/fileProcess.ts b/src/core/file/fileProcess.ts index be9876165..2357892b3 100644 --- a/src/core/file/fileProcess.ts +++ b/src/core/file/fileProcess.ts @@ -5,64 +5,139 @@ import { initTaskRunner } from '../../shared/processConcurrency.js'; import type { RepomixProgressCallback } from '../../shared/types.js'; import { type FileManipulator, getFileManipulator } from './fileManipulate.js'; import type { ProcessedFile, RawFile } from './fileTypes.js'; +import { truncateBase64Content } from './truncateBase64.js'; import type { FileProcessTask } from './workers/fileProcessWorker.js'; type GetFileManipulator = (filePath: string) => FileManipulator | null; +/** + * Apply lightweight transforms on the main thread after worker processing. + * All lightweight transforms are centralized here to avoid duplication with workers. + * + * Transform order: [removeComments → compress] (worker) → truncateBase64 → removeEmptyLines → trim → showLineNumbers + * - removeEmptyLines runs after removeComments so that empty lines created by comment removal are cleaned up. 
+ */ +export const applyLightweightTransforms = ( + files: ProcessedFile[], + config: RepomixConfigMerged, + progressCallback: RepomixProgressCallback, + deps: { getFileManipulator: GetFileManipulator }, +): ProcessedFile[] => { + const totalFiles = files.length; + const results: ProcessedFile[] = Array.from({ length: totalFiles }) as ProcessedFile[]; + + for (let i = 0; i < totalFiles; i++) { + const file = files[i]; + let content = file.content; + + if (config.output.truncateBase64) { + content = truncateBase64Content(content); + } + + if (config.output.removeEmptyLines) { + const manipulator = deps.getFileManipulator(file.path); + if (manipulator) { + content = manipulator.removeEmptyLines(content); + } + } + + content = content.trim(); + + if (config.output.showLineNumbers && !config.output.compress) { + const lines = content.split('\n'); + const padding = lines.length.toString().length; + const numberedLines = lines.map((line, idx) => `${(idx + 1).toString().padStart(padding)}: ${line}`); + content = numberedLines.join('\n'); + } + + results[i] = { path: file.path, content }; + + if ((i + 1) % 50 === 0 || i === totalFiles - 1) { + progressCallback(`Processing file... (${i + 1}/${totalFiles}) ${pc.dim(file.path)}`); + } + } + + return results; +}; + +/** + * Process files through a two-phase pipeline: + * + * 1. Heavy transforms (worker threads, skipped when not needed): + * removeComments → compress + * + * 2. Lightweight transforms (main thread, always applied): + * truncateBase64 → removeEmptyLines → trim → showLineNumbers + * + * removeEmptyLines intentionally runs after removeComments so that + * empty lines created by comment removal are cleaned up. 
+ */ export const processFiles = async ( rawFiles: RawFile[], config: RepomixConfigMerged, progressCallback: RepomixProgressCallback, - deps: { - initTaskRunner: typeof initTaskRunner; - getFileManipulator: GetFileManipulator; - } = { + deps = { initTaskRunner, getFileManipulator, }, ): Promise<ProcessedFile[]> => { - const taskRunner = deps.initTaskRunner({ - numOfTasks: rawFiles.length, - workerType: 'fileProcess', - // High memory usage and leak risk - runtime: 'worker_threads', - }); - const tasks = rawFiles.map( - (rawFile, _index) => - ({ - rawFile, - config, - }) satisfies FileProcessTask, - ); - - try { - const startTime = process.hrtime.bigint(); + const startTime = process.hrtime.bigint(); + let files: ProcessedFile[]; + + // Only compress (tree-sitter) and removeComments (AST manipulation) justify worker thread overhead + const useWorkers = config.output.compress || config.output.removeComments; + + if (useWorkers) { + // Phase 1: Heavy processing via workers (removeComments, compress) logger.trace(`Starting file processing for ${rawFiles.length} files using worker pool`); - let completedTasks = 0; - const totalTasks = tasks.length; - - const results = await Promise.all( - tasks.map((task) => - taskRunner.run(task).then((result) => { - completedTasks++; - progressCallback(`Processing file... (${completedTasks}/${totalTasks}) ${pc.dim(task.rawFile.path)}`); - logger.trace(`Processing file... 
(${completedTasks}/${totalTasks}) ${task.rawFile.path}`); - return result; - }), - ), + const taskRunner = deps.initTaskRunner({ + numOfTasks: rawFiles.length, + workerType: 'fileProcess', + runtime: 'worker_threads', + }); + + const tasks = rawFiles.map( + (rawFile) => + ({ + rawFile, + config, + }) satisfies FileProcessTask, ); - const endTime = process.hrtime.bigint(); - const duration = Number(endTime - startTime) / 1e6; - logger.trace(`File processing completed in ${duration.toFixed(2)}ms`); - - return results; - } catch (error) { - logger.error('Error during file processing:', error); - throw error; - } finally { - // Always cleanup worker pool - await taskRunner.cleanup(); + try { + let completedTasks = 0; + const totalTasks = tasks.length; + + files = await Promise.all( + tasks.map((task) => + taskRunner.run(task).then((result) => { + completedTasks++; + progressCallback(`Processing file... (${completedTasks}/${totalTasks}) ${pc.dim(task.rawFile.path)}`); + logger.trace(`Processing file... 
(${completedTasks}/${totalTasks}) ${task.rawFile.path}`); + return result; + }), + ), + ); + } catch (error) { + logger.error('Error during file processing:', error); + throw error; + } finally { + await taskRunner.cleanup(); + } + + // Phase 2: Lightweight transforms (no progress - already reported by workers) + files = applyLightweightTransforms(files, config, () => {}, deps); + } else { + // No heavy processing needed - apply lightweight transforms directly + logger.trace(`Starting file processing for ${rawFiles.length} files in main thread (lightweight mode)`); + const inputFiles = rawFiles.map((rawFile) => ({ path: rawFile.path, content: rawFile.content })); + files = applyLightweightTransforms(inputFiles, config, progressCallback, deps); } + + const endTime = process.hrtime.bigint(); + const duration = Number(endTime - startTime) / 1e6; + logger.trace(`File processing completed in ${duration.toFixed(2)}ms`); + + return files; }; diff --git a/src/core/file/fileProcessContent.ts b/src/core/file/fileProcessContent.ts index 18f3c6568..b6acdb679 100644 --- a/src/core/file/fileProcessContent.ts +++ b/src/core/file/fileProcessContent.ts @@ -3,20 +3,15 @@ import { logger } from '../../shared/logger.js'; import { parseFile } from '../treeSitter/parseFile.js'; import { getFileManipulator } from './fileManipulate.js'; import type { RawFile } from './fileTypes.js'; -import { truncateBase64Content } from './truncateBase64.js'; /** - * Process the content of a file according to the configuration - * Applies various transformations based on the config: - * - Remove comments - * - Remove empty lines - * - Truncate base64 encoded data + * Process the content of a file for CPU-intensive operations. 
+ * Only handles heavy transformations that benefit from worker threads: + * - Remove comments (language-specific AST manipulation) * - Compress content using Tree-sitter - * - Add line numbers * - * @param rawFile Raw file data containing path and content - * @param config Repomix configuration - * @returns Processed content string + * Lightweight transforms (truncateBase64, removeEmptyLines, trim, showLineNumbers) + * are applied separately on the main thread by processFiles(). */ export const processContent = async (rawFile: RawFile, config: RepomixConfigMerged): Promise<string> => { const processStartAt = process.hrtime.bigint(); @@ -25,20 +20,10 @@ export const processContent = async (rawFile: RawFile, config: RepomixConfigMerg logger.trace(`Processing file: ${rawFile.path}`); - if (config.output.truncateBase64) { - processedContent = truncateBase64Content(processedContent); - } - if (manipulator && config.output.removeComments) { processedContent = manipulator.removeComments(processedContent); } - if (config.output.removeEmptyLines && manipulator) { - processedContent = manipulator.removeEmptyLines(processedContent); - } - - processedContent = processedContent.trim(); - if (config.output.compress) { try { const parsedContent = await parseFile(processedContent, rawFile.path, config); @@ -49,14 +34,8 @@ export const processContent = async (rawFile: RawFile, config: RepomixConfigMerg } catch (error: unknown) { const message = error instanceof Error ? 
error.message : String(error); logger.error(`Error parsing ${rawFile.path} in compressed mode: ${message}`); - //re-throw error throw error; } - } else if (config.output.showLineNumbers) { - const lines = processedContent.split('\n'); - const padding = lines.length.toString().length; - const numberedLines = lines.map((line, i) => `${(i + 1).toString().padStart(padding)}: ${line}`); - processedContent = numberedLines.join('\n'); } const processEndAt = process.hrtime.bigint(); diff --git a/src/core/file/truncateBase64.ts b/src/core/file/truncateBase64.ts index da01933ce..c186c529a 100644 --- a/src/core/file/truncateBase64.ts +++ b/src/core/file/truncateBase64.ts @@ -5,6 +5,13 @@ const TRUNCATION_LENGTH = 32; const MIN_CHAR_DIVERSITY = 10; const MIN_CHAR_TYPE_COUNT = 3; +// Pre-compiled regex patterns (avoid re-creation per file) +const dataUriPattern = new RegExp( + `data:([a-zA-Z0-9\\/\\-\\+]+)(;[a-zA-Z0-9\\-=]+)*;base64,([A-Za-z0-9+/=]{${MIN_BASE64_LENGTH_DATA_URI},})`, + 'g', +); +const standaloneBase64Pattern = new RegExp(`([A-Za-z0-9+/]{${MIN_BASE64_LENGTH_STANDALONE},}={0,2})`, 'g'); + /** * Truncates base64 encoded data in content to reduce file size * Detects common base64 patterns like data URIs and standalone base64 strings @@ -13,15 +20,9 @@ const MIN_CHAR_TYPE_COUNT = 3; * @returns Content with base64 data truncated */ export const truncateBase64Content = (content: string): string => { - // Pattern to match data URIs (e.g., data:image/png;base64,...) 
- const dataUriPattern = new RegExp( - `data:([a-zA-Z0-9\\/\\-\\+]+)(;[a-zA-Z0-9\\-=]+)*;base64,([A-Za-z0-9+/=]{${MIN_BASE64_LENGTH_DATA_URI},})`, - 'g', - ); - - // Pattern to match standalone base64 strings - // This matches base64 strings that are likely encoded binary data - const standaloneBase64Pattern = new RegExp(`([A-Za-z0-9+/]{${MIN_BASE64_LENGTH_STANDALONE},}={0,2})`, 'g'); + // Reset lastIndex since patterns are global and reused across calls + dataUriPattern.lastIndex = 0; + standaloneBase64Pattern.lastIndex = 0; let processedContent = content; diff --git a/tests/core/file/fileProcess.test.ts b/tests/core/file/fileProcess.test.ts index f21baf5b7..46a4e823f 100644 --- a/tests/core/file/fileProcess.test.ts +++ b/tests/core/file/fileProcess.test.ts @@ -1,8 +1,7 @@ import { describe, expect, it } from 'vitest'; import type { FileManipulator } from '../../../src/core/file/fileManipulate.js'; -import { processFiles } from '../../../src/core/file/fileProcess.js'; -import { processContent } from '../../../src/core/file/fileProcessContent.js'; -import type { RawFile } from '../../../src/core/file/fileTypes.js'; +import { applyLightweightTransforms, processFiles } from '../../../src/core/file/fileProcess.js'; +import type { ProcessedFile, RawFile } from '../../../src/core/file/fileTypes.js'; import type { FileProcessTask } from '../../../src/core/file/workers/fileProcessWorker.js'; import fileProcessWorker from '../../../src/core/file/workers/fileProcessWorker.js'; import type { WorkerOptions } from '../../../src/shared/processConcurrency.js'; @@ -33,7 +32,7 @@ const mockInitTaskRunner = (_options: WorkerOptions) => { describe('fileProcess', () => { describe('processFiles', () => { - it('should process multiple files', async () => { + it('should process multiple files with worker path', async () => { const mockRawFiles: RawFile[] = [ { path: 'file1.js', content: '// comment\nconst a = 1;' }, { path: 'file2.js', content: '/* comment */\nconst b = 2;' }, @@ 
-55,119 +54,201 @@ describe('fileProcess', () => { { path: 'file2.js', content: 'const b = 2;' }, ]); }); - }); - describe('processContent', () => { - it('should remove comments and empty lines when configured', async () => { - const content = '// comment\nconst a = 1;\n\n/* multi-line\ncomment */\nconst b = 2;'; - const filePath = 'test.js'; + it('should apply all transforms in combined worker + lightweight pipeline', async () => { + const base64 = + 'DTJXfKHG6xA1Wn+kye4TOF2Cp8zxFjtgharP9Bk+Y4it0vccQWaLsNX6H0RpjrPY/SJHbJG22wAlSm+Uud4DKE1yl7zhBitQdZq/5AkuU3idwucMMVZ7oMXqDzRZfqPI7RI3XIGmy/AVOl+Eqc7zGD1ih6zR9htAZYqv1PkeQ2iNstf8IUZrkLXa/yRJbpO43QInTHGWu+AFKk90mb7jCC1Sd5zB5gswVXqfxOkOM1h9osfsETZbgKXK7xQ5XoOozfIXPGGGq9D1Gj9kia7T+B1CZ4yx1vsgRWqPtNn+I0htkrfcASZLcJW63wQpTnOYveIHLFF2m8DlCi9UeZ7D6A=='; + const mockRawFiles: RawFile[] = [ + { + path: 'file1.js', + content: `// comment\nconst a = 1;\n\nconst img = "${base64}";`, + }, + ]; const config = createMockConfig({ output: { removeComments: true, removeEmptyLines: true, + truncateBase64: true, + showLineNumbers: true, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = await processFiles(mockRawFiles, config, () => {}, { + initTaskRunner: mockInitTaskRunner, + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe('const a = 1;\nconst b = 2;'); + // removeComments removes comment, removeEmptyLines cleans up, truncateBase64 truncates, showLineNumbers adds numbers + expect(result.length).toBe(1); + expect(result[0].content).toContain('1:'); + expect(result[0].content).toContain('2:'); + expect(result[0].content).toContain('...'); + expect(result[0].content).not.toContain('// comment'); + expect(result[0].content).not.toContain(base64); }); - it('should not remove comments or empty lines when not configured', async () => { - const content = '// comment\nconst a = 1;\n\n/* multi-line\ncomment */\nconst b = 2;'; - const filePath = 'test.js'; + it('should 
process files with lightweight-only config', async () => { + const mockRawFiles: RawFile[] = [ + { path: 'file1.js', content: ' const a = 1; \n\n' }, + { path: 'file2.js', content: '\nconst b = 2;\n\n' }, + ]; const config = createMockConfig({ output: { removeComments: false, - removeEmptyLines: false, + removeEmptyLines: true, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = await processFiles(mockRawFiles, config, () => {}, { + initTaskRunner: mockInitTaskRunner, + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe(content.trim()); + expect(result).toEqual([ + { path: 'file1.js', content: 'const a = 1;' }, + { path: 'file2.js', content: 'const b = 2;' }, + ]); }); + }); - it('should handle files without a manipulator', async () => { - const content = 'Some content'; - const filePath = 'unknown.ext'; + describe('applyLightweightTransforms', () => { + it('should truncate base64 when configured', () => { + const base64 = + 'DTJXfKHG6xA1Wn+kye4TOF2Cp8zxFjtgharP9Bk+Y4it0vccQWaLsNX6H0RpjrPY/SJHbJG22wAlSm+Uud4DKE1yl7zhBitQdZq/5AkuU3idwucMMVZ7oMXqDzRZfqPI7RI3XIGmy/AVOl+Eqc7zGD1ih6zR9htAZYqv1PkeQ2iNstf8IUZrkLXa/yRJbpO43QInTHGWu+AFKk90mb7jCC1Sd5zB5gswVXqfxOkOM1h9osfsETZbgKXK7xQ5XoOozfIXPGGGq9D1Gj9kia7T+B1CZ4yx1vsgRWqPtNn+I0htkrfcASZLcJW63wQpTnOYveIHLFF2m8DlCi9UeZ7D6A=='; + const files: ProcessedFile[] = [{ path: 'test.js', content: `const img = "${base64}";` }]; + const config = createMockConfig({ + output: { + truncateBase64: true, + }, + }); + + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); + + expect(result[0].content).toContain('...'); + expect(result[0].content.length).toBeLessThan(files[0].content.length); + }); + + it('should remove empty lines when configured', () => { + const files: ProcessedFile[] = [{ path: 'test.js', content: 'line1\n\nline2\n\nline3' }]; + const config = createMockConfig({ + output: { + 
removeEmptyLines: true, + }, + }); + + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); + + expect(result).toEqual([{ path: 'test.js', content: 'line1\nline2\nline3' }]); + }); + + it('should not remove empty lines for files without a manipulator', () => { + const files: ProcessedFile[] = [{ path: 'test.unknown', content: 'line1\n\nline2' }]; const config = createMockConfig({ output: { - removeComments: true, removeEmptyLines: true, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe(content); + expect(result).toEqual([{ path: 'test.unknown', content: 'line1\n\nline2' }]); }); - it('should add line numbers when showLineNumbers is true', async () => { - const content = 'Line 1\nLine 2\nLine 3'; - const filePath = 'test.txt'; + it('should trim content', () => { + const files: ProcessedFile[] = [{ path: 'test.js', content: ' hello \n' }]; + const config = createMockConfig(); + + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); + + expect(result).toEqual([{ path: 'test.js', content: 'hello' }]); + }); + + it('should add line numbers when showLineNumbers is true', () => { + const files: ProcessedFile[] = [{ path: 'test.txt', content: 'Line 1\nLine 2\nLine 3' }]; const config = createMockConfig({ output: { showLineNumbers: true, - removeComments: false, - removeEmptyLines: false, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe('1: Line 1\n2: Line 2\n3: Line 3'); + expect(result).toEqual([{ path: 'test.txt', content: '1: Line 1\n2: Line 2\n3: Line 3' }]); }); - it('should not add 
line numbers when showLineNumbers is false', async () => { - const content = 'Line 1\nLine 2\nLine 3'; - const filePath = 'test.txt'; + it('should not add line numbers when showLineNumbers is false', () => { + const files: ProcessedFile[] = [{ path: 'test.txt', content: 'Line 1\nLine 2\nLine 3' }]; const config = createMockConfig({ output: { showLineNumbers: false, - removeComments: false, - removeEmptyLines: false, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe('Line 1\nLine 2\nLine 3'); + expect(result).toEqual([{ path: 'test.txt', content: 'Line 1\nLine 2\nLine 3' }]); }); - it('should handle empty content when showLineNumbers is true', async () => { - const content = ''; - const filePath = 'empty.txt'; + it('should handle empty content when showLineNumbers is true', () => { + const files: ProcessedFile[] = [{ path: 'empty.txt', content: '' }]; const config = createMockConfig({ output: { showLineNumbers: true, - removeComments: false, - removeEmptyLines: false, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); - expect(result).toBe('1: '); + expect(result).toEqual([{ path: 'empty.txt', content: '1: ' }]); }); - it('should pad line numbers correctly for files with many lines', async () => { + it('should pad line numbers correctly for files with many lines', () => { const content = Array(100).fill('Line').join('\n'); - const filePath = 'long.txt'; + const files: ProcessedFile[] = [{ path: 'long.txt', content }]; const config = createMockConfig({ output: { showLineNumbers: true, - removeComments: false, - removeEmptyLines: false, }, }); - const result = await processContent({ path: filePath, content }, config); + const result = 
applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); - const lines = result.split('\n'); + const lines = result[0].content.split('\n'); expect(lines[0]).toBe(' 1: Line'); expect(lines[9]).toBe(' 10: Line'); expect(lines[99]).toBe('100: Line'); }); + + it('should not add line numbers when compress is enabled', () => { + const files: ProcessedFile[] = [{ path: 'test.txt', content: 'Line 1\nLine 2' }]; + const config = createMockConfig({ + output: { + showLineNumbers: true, + compress: true, + }, + }); + + const result = applyLightweightTransforms(files, config, () => {}, { + getFileManipulator: mockGetFileManipulator, + }); + + expect(result).toEqual([{ path: 'test.txt', content: 'Line 1\nLine 2' }]); + }); }); }); diff --git a/tests/core/file/fileProcessContent.test.ts b/tests/core/file/fileProcessContent.test.ts index ffc864489..a64e6260c 100644 --- a/tests/core/file/fileProcessContent.test.ts +++ b/tests/core/file/fileProcessContent.test.ts @@ -38,7 +38,6 @@ describe('processContent', () => { const result = await processContent(rawFile, config); expect(result).toBe('const x = 1;\n\nconst y = 2;'); expect(mockManipulator.removeComments).not.toHaveBeenCalled(); - expect(mockManipulator.removeEmptyLines).not.toHaveBeenCalled(); }); it('should remove comments when configured', async () => { @@ -60,25 +59,6 @@ describe('processContent', () => { expect(result).toBe('const x = 1; \nconst y = 2;'); }); - it('should remove empty lines when configured', async () => { - const rawFile: RawFile = { - path: 'test.ts', - content: 'const x = 1;\n\n\nconst y = 2;', - }; - const config: RepomixConfigMerged = { - output: { - removeComments: false, - removeEmptyLines: true, - compress: false, - showLineNumbers: false, - }, - } as RepomixConfigMerged; - - const result = await processContent(rawFile, config); - expect(mockManipulator.removeEmptyLines).toHaveBeenCalledWith(rawFile.content); - expect(result).toBe('const x = 1;\nconst y 
= 2;'); - }); - it('should compress content using Tree-sitter when configured', async () => { const rawFile: RawFile = { path: 'test.ts', @@ -138,24 +118,6 @@ describe('processContent', () => { await expect(processContent(rawFile, config)).rejects.toThrow('Parse error'); }); - it('should add line numbers when configured', async () => { - const rawFile: RawFile = { - path: 'test.ts', - content: 'const x = 1;\nconst y = 2;\nconst z = 3;', - }; - const config: RepomixConfigMerged = { - output: { - removeComments: false, - removeEmptyLines: false, - compress: false, - showLineNumbers: true, - }, - } as RepomixConfigMerged; - - const result = await processContent(rawFile, config); - expect(result).toBe('1: const x = 1;\n2: const y = 2;\n3: const z = 3;'); - }); - it('should handle files without a manipulator', async () => { const rawFile: RawFile = { path: 'test.unknown', @@ -164,7 +126,7 @@ describe('processContent', () => { const config: RepomixConfigMerged = { output: { removeComments: true, - removeEmptyLines: true, + removeEmptyLines: false, compress: false, showLineNumbers: false, }, diff --git a/tests/core/file/truncateBase64.test.ts b/tests/core/file/truncateBase64.test.ts index 75eaf44a8..3f7d42373 100644 --- a/tests/core/file/truncateBase64.test.ts +++ b/tests/core/file/truncateBase64.test.ts @@ -89,6 +89,13 @@ describe('truncateBase64Content', () => { expect(result).toBe('const paddedData = "DTJXfKHG6xA1Wn+kye4TOF2Cp8zxFjtg...";'); }); + it('should produce consistent results on consecutive calls (regex lastIndex safety)', () => { + const input = `const img = "data:image/png;base64,${longBase64}";`; + const result1 = truncateBase64Content(input); + const result2 = truncateBase64Content(input); + expect(result1).toBe(result2); + }); + it('should preserve medium-length base64-like strings under 256 chars', () => { // 60-char string that previously would have been truncated const mediumString = 'VGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZy4=';