"""
Example usage of ProgressLogger for workflow progress tracking.

This file demonstrates how to use the ProgressLogger utility to track
progress of long-running operations in workflows.
"""

import asyncio
import time

from modules.shared.progressLogger import ProgressLogger
async def exampleDocumentProcessing(workflowService, workflow, *,
                                    documents=None, delaySeconds=0.5):
    """Example of document processing with progress tracking.

    Args:
        workflowService: Service exposing ``createProgressLogger(workflow)``.
        workflow: Workflow object; its ``id`` is embedded in the operation id.
        documents: Optional list of document names to process. Defaults to a
            fixed five-document sample, preserving the original behavior.
        delaySeconds: Simulated per-document processing time in seconds.

    Raises:
        Exception: Re-raises any error from the progress logger; the operation
            is first marked as failed.
    """
    if documents is None:
        documents = ["doc1.pdf", "doc2.docx", "doc3.txt", "doc4.xlsx", "doc5.pdf"]

    # Create progress logger and a unique (per-second) operation id.
    progressLogger = workflowService.createProgressLogger(workflow)
    operationId = f"docProcess_{workflow.id}_{int(time.time())}"

    try:
        # Start operation
        progressLogger.startOperation(
            operationId,
            "Extract",
            "Document Processing",
            f"Processing {len(documents)} documents"
        )

        for index, doc in enumerate(documents):
            # Report fractional progress in (0, 1] after each document.
            progress = (index + 1) / len(documents)
            progressLogger.updateProgress(
                operationId,
                progress,
                f"Processing {doc} ({index+1}/{len(documents)})"
            )

            # Simulate processing time
            await asyncio.sleep(delaySeconds)

        # Complete operation
        progressLogger.completeOperation(operationId, True)

    except Exception:
        # Mark the operation as failed, then propagate the error to the caller.
        progressLogger.completeOperation(operationId, False)
        raise
async def exampleAiProcessing(workflowService, workflow, *,
                              chunkCount=10, delaySeconds=0.3):
    """Example of AI processing with chunk progress tracking.

    Args:
        workflowService: Service exposing ``createProgressLogger(workflow)``.
        workflow: Workflow object; its ``id`` is embedded in the operation id.
        chunkCount: Number of simulated chunks to process (default 10,
            matching the original hard-coded list).
        delaySeconds: Simulated per-chunk processing time in seconds.

    Raises:
        Exception: Re-raises any error from the progress logger; the operation
            is first marked as failed.
    """
    progressLogger = workflowService.createProgressLogger(workflow)
    operationId = f"aiProcess_{workflow.id}_{int(time.time())}"

    try:
        # Start operation
        progressLogger.startOperation(
            operationId,
            "AI",
            "Content Analysis",
            f"Processing {chunkCount} chunks"
        )

        # Simulate AI processing with chunks numbered 1..chunkCount.
        for chunkNumber in range(1, chunkCount + 1):
            progress = chunkNumber / chunkCount
            progressLogger.updateProgress(
                operationId,
                progress,
                f"Processing chunk {chunkNumber} of {chunkCount}"
            )

            # Simulate AI processing time
            await asyncio.sleep(delaySeconds)

        # Complete operation
        progressLogger.completeOperation(operationId, True)

    except Exception:
        # Mark the operation as failed, then propagate the error to the caller.
        progressLogger.completeOperation(operationId, False)
        raise
async def exampleWorkflowTask(workflowService, workflow, *,
                              steps=None, delaySeconds=0.4):
    """Example of workflow task execution with progress tracking.

    Args:
        workflowService: Service exposing ``createProgressLogger(workflow)``.
        workflow: Workflow object; its ``id`` is embedded in the operation id.
        steps: Optional list of step descriptions to report as progress
            messages. Defaults to the original five-step sequence.
        delaySeconds: Simulated per-step processing time in seconds.

    Raises:
        Exception: Re-raises any error from the progress logger; the operation
            is first marked as failed.
    """
    if steps is None:
        steps = [
            "Initializing analysis",
            "Loading data",
            "Processing data",
            "Generating results",
            "Saving output"
        ]

    progressLogger = workflowService.createProgressLogger(workflow)
    operationId = f"workflowTask_{workflow.id}_{int(time.time())}"

    try:
        # Start operation
        progressLogger.startOperation(
            operationId,
            "Workflow",
            "Task Execution",
            "Executing data analysis task"
        )

        for index, step in enumerate(steps):
            # Report fractional progress in (0, 1]; the step text itself
            # is the progress message.
            progress = (index + 1) / len(steps)
            progressLogger.updateProgress(
                operationId,
                progress,
                step
            )

            # Simulate step processing time
            await asyncio.sleep(delaySeconds)

        # Complete operation
        progressLogger.completeOperation(operationId, True)

    except Exception:
        # Mark the operation as failed, then propagate the error to the caller.
        progressLogger.completeOperation(operationId, False)
        raise
# Example of how to integrate into existing services
class ExampleService:
    """Example service showing integration with ProgressLogger."""

    def __init__(self, workflowService):
        # Service that knows how to create ProgressLogger instances.
        self.workflowService = workflowService

    async def processWithProgress(self, workflow, data, *, chunkSize=10):
        """Process data in fixed-size chunks with progress tracking.

        Args:
            workflow: Workflow object; its ``id`` is embedded in the
                operation id.
            data: Sequence of items to process (must support ``len`` and
                slicing).
            chunkSize: Number of items per chunk (default 10, matching the
                original hard-coded value). Must be positive.

        Raises:
            ValueError: If ``chunkSize`` is not positive.
            Exception: Re-raises any processing error; the operation is first
                marked as failed.
        """
        if chunkSize <= 0:
            raise ValueError("chunkSize must be positive")

        progressLogger = self.workflowService.createProgressLogger(workflow)
        operationId = f"example_{workflow.id}_{int(time.time())}"

        try:
            # Start operation
            progressLogger.startOperation(
                operationId,
                "Example",
                "Data Processing",
                f"Processing {len(data)} items"
            )

            # Ceiling division: number of chunks needed to cover all items.
            totalChunks = (len(data) + chunkSize - 1) // chunkSize

            for start in range(0, len(data), chunkSize):
                chunk = data[start:start + chunkSize]
                chunkNum = start // chunkSize + 1

                progressLogger.updateProgress(
                    operationId,
                    chunkNum / totalChunks,
                    f"Processing chunk {chunkNum}/{totalChunks}"
                )

                # Process chunk
                await self._processChunk(chunk)

            # Complete operation
            progressLogger.completeOperation(operationId, True)

        except Exception:
            # Mark the operation as failed, then propagate the error.
            progressLogger.completeOperation(operationId, False)
            raise

    async def _processChunk(self, chunk):
        """Process a chunk of data (simulated with a short sleep)."""
        await asyncio.sleep(0.1)