#!/usr/bin/env python3
"""
Granite Docling 258M - Hugging Face Spaces Demo

This is an online demo of the IBM Granite Docling 258M model implementation,
running on Hugging Face Spaces (free CPU tier, with GPU acceleration when available).
"""

import os
import sys
import tempfile
import json
import time
from pathlib import Path
from typing import Tuple

import gradio as gr

# Add current directory to path for imports
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

# Import the Granite Docling implementation (GPU build preferred, CPU-only fallback)
try:
    from granite_docling_gpu import GraniteDoclingGPU, DeviceManager
    DOCLING_AVAILABLE = True
except ImportError:
    try:
        # CPU-only fallback: expose the base class under the GPU alias
        from granite_docling import GraniteDocling as GraniteDoclingGPU
        DeviceManager = None
        DOCLING_AVAILABLE = True
    except ImportError as e:
        DOCLING_AVAILABLE = False
        IMPORT_ERROR = str(e)

class GraniteDoclingHFDemo:
    """Hugging Face Spaces demo interface for Granite Docling."""

    def __init__(self):
        """Initialize the HF Spaces demo."""
        self.granite_instance = None
        self.device_info = None

        if DOCLING_AVAILABLE:
            try:
                # Try to initialize with GPU support
                if DeviceManager:
                    device_manager = DeviceManager()
                    self.device_info = device_manager.get_device_info()
                    self.granite_instance = GraniteDoclingGPU(auto_device=True)
                else:
                    # Fallback to CPU version
                    self.granite_instance = GraniteDoclingGPU()

                print("βœ… Granite Docling initialized successfully")
                if hasattr(self.granite_instance, 'device'):
                    print(f"πŸ’» Using device: {self.granite_instance.device}")

            except Exception as e:
                print(f"⚠️ Warning: Could not initialize Granite Docling: {e}")
                self.granite_instance = None

    def process_document_demo(
        self,
        file_input,
        processing_mode: str,
        include_metadata: bool = True
    ) -> Tuple[str, str, str, str]:
        """
        Process uploaded document for HF Spaces demo.

        Returns: (markdown_output, json_metadata, processing_info, error_message)
        """
        if not DOCLING_AVAILABLE:
            error_msg = f"❌ Docling not available: {IMPORT_ERROR}"
            return "", "", "", error_msg

        if file_input is None:
            return "", "", "", "Please upload a file first."

        if self.granite_instance is None:
            return "", "", "", "❌ Granite Docling model not initialized. This might be due to missing model files."

        try:
            start_time = time.time()

            # Get device info for display
            device_used = getattr(self.granite_instance, 'device', 'CPU')
            processing_info = f"πŸ”§ Processing with Granite Docling on {device_used}...\n"

            # Save uploaded file to a temporary location
            temp_file = None
            try:
                # Create the temp file with the original extension so format detection works.
                # gr.File(type="filepath") passes a plain string path; file-like objects
                # (with .name/.read) are also handled for robustness.
                if isinstance(file_input, str):
                    file_ext = Path(file_input).suffix or '.tmp'
                else:
                    file_ext = Path(getattr(file_input, 'name', '')).suffix or '.tmp'
                with tempfile.NamedTemporaryFile(delete=False, suffix=file_ext) as tmp:
                    if hasattr(file_input, 'read'):
                        tmp.write(file_input.read())
                    else:
                        # Handle the file-path case
                        with open(file_input, 'rb') as f:
                            tmp.write(f.read())
                    temp_file = tmp.name

                # Process based on selected mode
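                # NOTE: convert_document() is assumed to return a dict shaped like
                # {"content": "<markdown text>", "metadata": {...}}; the branches
                # below rely on "content" and optionally merge "metadata".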
                if processing_mode == "Document Analysis (Fast)":
                    # Use the fast analysis method if available
                    if hasattr(self.granite_instance, 'analyze_document_structure'):
                        analysis_result = self.granite_instance.analyze_document_structure(temp_file)

                        if "error" in analysis_result:
                            markdown_output = f"""# Document Analysis - Error

⚠️ **Analysis Failed**: {analysis_result['error']}

**Processing Time**: {analysis_result.get('analysis_time_seconds', 0)} seconds
"""
                        else:
                            # Format the analysis result
                            structure = analysis_result.get('structure_detected', {})
                            metadata_info = analysis_result.get('metadata_extraction', {})

                            markdown_output = f"""# πŸ” Fast Document Analysis Report

## πŸ“Š Document Overview
- **File Name**: {analysis_result.get('file_name', 'Unknown')}
- **File Size**: {analysis_result.get('file_size_mb', 0)} MB
- **Document Type**: {analysis_result.get('document_type', 'Unknown')}
- **Total Pages**: {analysis_result.get('total_pages', 1)}
- **Pages Analyzed**: {analysis_result.get('pages_analyzed', 1)}
- **Analysis Time**: {analysis_result.get('analysis_time_seconds', 0)} seconds ⚑

## πŸ—οΈ Document Structure
- **Headers Detected**: {structure.get('headers_found', 0)}
- **Estimated Tables**: {structure.get('estimated_tables', 0)}
- **Images Found**: {structure.get('images_detected', 0)}
- **Text Density**: {structure.get('text_density', 'N/A')}
- **Contains Text**: {'Yes' if structure.get('has_text', False) else 'No'}

## πŸ“‘ Sample Headers Found:
{chr(10).join(f"β€’ {header}" for header in structure.get('sample_headers', [])) if structure.get('sample_headers') else "No headers detected"}

## πŸ“‹ Document Metadata:
{chr(10).join(f"β€’ **{k.replace('_', ' ').title()}**: {v}" for k, v in metadata_info.items() if v) if metadata_info else "No metadata available"}

## πŸ‘οΈ Content Preview:
```
{analysis_result.get('content_preview', 'No preview available')[:800]}
{'...' if len(analysis_result.get('content_preview', '')) > 800 else ''}
```

---
*This analysis was performed using lightweight document scanning for maximum speed. Perfect for getting quick insights into document structure!*
"""
                        # Use analysis result for metadata
                        result = analysis_result
                    else:
                        # Fallback to regular conversion with analysis
                        result = self.granite_instance.convert_document(temp_file)
                        lines = result["content"].split('\n')
                        headers = [line for line in lines if line.startswith('#')]

                        markdown_output = f"""# Document Analysis

## Quick Analysis Results
- **Total lines**: {len(lines)}
- **Headers found**: {len(headers)}
- **Processing time**: {time.time() - start_time:.2f}s
- **Device used**: {device_used}

## Sample Content:
{chr(10).join(lines[:15])}
"""

                elif processing_mode == "Full Markdown Conversion":
                    result = self.granite_instance.convert_document(temp_file)
                    markdown_output = result["content"]

                elif processing_mode == "Table Extraction":
                    result = self.granite_instance.convert_document(temp_file)
                    # Extract table-like content
                    lines = result["content"].split('\n')
                    table_lines = [line for line in lines if '|' in line and line.strip()]

                    if table_lines:
                        markdown_output = f"""# πŸ“Š Extracted Tables

**Device**: {device_used} | **Processing Time**: {time.time() - start_time:.2f}s

{chr(10).join(table_lines)}
"""
                    else:
                        markdown_output = f"""# No Tables Found

**Device**: {device_used} | **Processing Time**: {time.time() - start_time:.2f}s

No table structures were detected in this document.
"""

                else:  # Quick Preview
                    result = self.granite_instance.convert_document(temp_file)
                    preview = result["content"][:1000]
                    if len(result["content"]) > 1000:
                        preview += "\n\n... (truncated)"

                    markdown_output = f"""# Quick Preview

**Device**: {device_used} | **Processing Time**: {time.time() - start_time:.2f}s

{preview}
"""

                # Calculate final processing time
                processing_time = time.time() - start_time

                # Prepare metadata
                if 'result' in locals():
                    metadata = {
                        "processing_mode": processing_mode,
                        "device_used": str(device_used),
                        "file_name": getattr(file_input, 'name', 'uploaded_file'),
                        "content_length": len(markdown_output),
                        "processing_time_seconds": round(processing_time, 2),
                        "processing_successful": True,
                        "demo_info": "Processed on Hugging Face Spaces"
                    }

                    if hasattr(result, 'get') and 'metadata' in result:
                        metadata.update(result['metadata'])
                else:
                    metadata = {
                        "processing_mode": processing_mode,
                        "processing_time_seconds": round(processing_time, 2),
                        "processing_successful": True
                    }

                json_metadata = json.dumps(metadata, indent=2) if include_metadata else ""

                processing_info = f"""βœ… Successfully processed with Granite Docling
πŸ’» Device: {device_used}
⚑ Mode: {processing_mode}
⏱️ Processing time: {processing_time:.2f}s
πŸ“„ Content length: {len(markdown_output)} characters
🌐 Running on Hugging Face Spaces (CPU tier)"""

                return markdown_output, json_metadata, processing_info, ""

            finally:
                # Clean up temp file
                if temp_file and os.path.exists(temp_file):
                    try:
                        os.unlink(temp_file)
                    except OSError:
                        pass

        except Exception as e:
            error_msg = f"❌ Error processing document: {str(e)}\n\nThis might be due to model loading issues on the free tier."
            return "", "", "", error_msg

    def create_demo_interface(self) -> gr.Interface:
        """Create the Hugging Face Spaces demo interface."""

        # Custom CSS for HF Spaces
        css = """
        .gradio-container {
            font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
            max-width: 1200px;
            margin: 0 auto;
        }
        .main-header {
            text-align: center;
            color: #ff6b35;
            margin-bottom: 20px;
            background: linear-gradient(90deg, #ff6b35, #f7931e);
            -webkit-background-clip: text;
            -webkit-text-fill-color: transparent;
            background-clip: text;
        }
        .info-box {
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            color: white;
            padding: 20px;
            border-radius: 15px;
            margin: 15px 0;
            box-shadow: 0 8px 25px rgba(0,0,0,0.1);
        }
        .demo-box {
            background: linear-gradient(135deg, #f093fb 0%, #f5576c 100%);
            color: white;
            padding: 20px;
            border-radius: 15px;
            margin: 15px 0;
            box-shadow: 0 8px 25px rgba(0,0,0,0.1);
        }
        .feature-box {
            background: linear-gradient(135deg, #4facfe 0%, #00f2fe 100%);
            color: white;
            padding: 15px;
            border-radius: 10px;
            margin: 10px 0;
        }
        """

        with gr.Blocks(css=css, title="Granite Docling 258M Demo", theme=gr.themes.Soft()) as interface:

            # Header
            gr.HTML("""
                <div class="main-header">
                    <h1>πŸ”¬ Granite Docling 258M - Online Demo</h1>
                    <p>Experience IBM's cutting-edge Vision-Language Model for document processing</p>
                    <p><strong>πŸ†“ Free Document Processing on Hugging Face Spaces</strong></p>
                </div>
            """)

            # Demo info
            device_status = "πŸ’» CPU Processing (Free Tier)"
            if self.granite_instance and hasattr(self.granite_instance, 'device'):
                device = str(self.granite_instance.device)
                if 'CUDA' in device:
                    device_status = "πŸš€ GPU Processing (CUDA) - Paid Tier"
                elif 'MPS' in device:
                    device_status = "🍎 Apple Silicon Processing (MPS)"

            demo_info = f"""
                <div class="demo-box">
                    <h3>🌟 Live Demo Status</h3>
                    <p><strong>Status</strong>: {"βœ… Ready" if DOCLING_AVAILABLE and self.granite_instance else "⚠️ Limited"}</p>
                    <p><strong>Processing</strong>: {device_status}</p>
                    <p><strong>Model</strong>: <a href="https://huggingface.co/ibm-granite/granite-docling-258M" target="_blank" style="color: white; text-decoration: underline;">IBM Granite Docling 258M</a> Vision-Language Model</p>
                    <p><strong>Hosting</strong>: πŸ€— Hugging Face Spaces (Free CPU Tier)</p>
                    <p><strong>Note</strong>: Upgrade to GPU tier for faster processing</p>
                </div>
            """
            gr.HTML(demo_info)

            # Status check
            if not DOCLING_AVAILABLE or not self.granite_instance:
                gr.HTML(f"""
                    <div style="background-color: #ffe6e6; padding: 15px; border-radius: 8px; margin: 10px 0; color: #d00;">
                        <h3>⚠️ Demo Limitations</h3>
                        <p>The full model might not be available on the free CPU tier. Processing will be slower than GPU but still functional.</p>
                        <p>For full functionality, clone the repository: <a href="https://github.com/felipemeres/granite-docling-implementation" target="_blank">GitHub Repository</a></p>
                    </div>
                """)

            with gr.Row():
                with gr.Column(scale=1):
                    # Input section
                    gr.HTML("<h3>πŸ“„ Upload Document</h3>")

                    file_input = gr.File(
                        label="Upload Document",
                        file_types=[".pdf", ".docx", ".doc", ".png", ".jpg", ".jpeg"],
                        type="filepath"
                    )

                    processing_mode = gr.Dropdown(
                        choices=[
                            "Document Analysis (Fast)",
                            "Full Markdown Conversion",
                            "Table Extraction",
                            "Quick Preview"
                        ],
                        label="Processing Mode",
                        value="Document Analysis (Fast)",
                        info="Choose processing type (Fast Analysis recommended for demo)"
                    )

                    include_metadata = gr.Checkbox(
                        label="Include Processing Metadata",
                        value=True
                    )

                    process_btn = gr.Button(
                        "πŸš€ Process Document",
                        variant="primary",
                        size="lg"
                    )

                with gr.Column(scale=2):
                    # Output section
                    gr.HTML("<h3>πŸ“Š Results</h3>")

                    # Processing status
                    processing_info = gr.Textbox(
                        label="Processing Status",
                        lines=8,
                        interactive=False
                    )

                    # Main output tabs
                    with gr.Tabs():
                        with gr.TabItem("πŸ“‹ Processed Content"):
                            markdown_output = gr.Markdown(
                                label="Processed Output",
                                height=500
                            )

                        with gr.TabItem("πŸ”§ Metadata"):
                            json_output = gr.Code(
                                label="Processing Metadata",
                                language="json",
                                lines=12
                            )

                        with gr.TabItem("❌ Errors"):
                            error_output = gr.Textbox(
                                label="Error Messages",
                                lines=8,
                                interactive=False
                            )

            # Features and info section
            gr.HTML("<h3>✨ About This Demo</h3>")

            with gr.Row():
                with gr.Column():
                    gr.HTML("""
                        <div class="feature-box">
                            <h4>πŸš€ Key Features:</h4>
                            <ul>
                                <li><strong>Vision-Language Understanding</strong>: Advanced document comprehension</li>
                                <li><strong>Multi-Format Support</strong>: PDF, DOCX, Images</li>
                                <li><strong>Fast Analysis</strong>: 19x faster document insights</li>
                                <li><strong>Reliable Processing</strong>: CPU-optimized on HF Spaces free tier</li>
                            </ul>
                        </div>
                    """)

                with gr.Column():
                    gr.HTML("""
                        <div class="feature-box">
                            <h4>πŸ”¬ Try These Modes:</h4>
                            <ul>
                                <li><strong>Document Analysis</strong>: Quick structural insights (Recommended)</li>
                                <li><strong>Full Conversion</strong>: Complete Markdown output</li>
                                <li><strong>Table Extraction</strong>: Focus on data tables</li>
                                <li><strong>Quick Preview</strong>: Fast content sample</li>
                            </ul>
                        </div>
                    """)

            # Event handlers
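            # process_document_demo returns (markdown, json_metadata, processing_info,
            # error_message); the order must match the `outputs` list below.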
            process_btn.click(
                fn=self.process_document_demo,
                inputs=[file_input, processing_mode, include_metadata],
                outputs=[markdown_output, json_output, processing_info, error_output]
            )

            # Footer with links
            gr.HTML("""
                <div class="info-box">
                    <h4>πŸ”— Links & Resources</h4>
                    <p>
                        <a href="https://github.com/felipemeres/granite-docling-implementation" target="_blank" style="color: white; text-decoration: underline;">πŸ“‚ GitHub Repository</a> |
                        <a href="https://huggingface.co/ibm-granite/granite-docling-258M" target="_blank" style="color: white; text-decoration: underline;">πŸ€— Model on Hugging Face</a> |
                        <a href="https://github.com/DS4SD/docling" target="_blank" style="color: white; text-decoration: underline;">πŸ“š Docling Documentation</a>
                    </p>
                    <p><em>This demo showcases a production-ready implementation of IBM's Granite Docling 258M model with performance optimizations and GPU acceleration.</em></p>
                </div>
            """)

        return interface

# Create and launch the demo
def main():
    """Main function to create and launch the HF Spaces demo."""
    print("πŸ”¬ Starting Granite Docling 258M Demo on Hugging Face Spaces...")

    demo = GraniteDoclingHFDemo()
    interface = demo.create_demo_interface()

    # Launch with HF Spaces settings
    interface.launch(
        server_name="0.0.0.0",  # Required for HF Spaces
        server_port=7860,       # Standard HF Spaces port
        share=False,            # Not needed on HF Spaces
        show_error=True
    )

if __name__ == "__main__":
    main()