import gradio as gr
import json
import tempfile
import os
from typing import Optional, Literal
import spaces
from pathlib import Path
from htrflow.volume.volume import Collection
from htrflow.pipeline.pipeline import Pipeline

# Export serializers supported by HTRflow: plain text, ALTO XML, PAGE XML and JSON.
DEFAULT_OUTPUT = "alto"
CHOICES = ["txt", "alto", "page", "json"]

# Built-in pipeline presets: single-page letters vs. two-page spreads, each with either
# the generic English TrOCR model or Riksarkivet's historical Swedish TrOCR model.
PIPELINE_CONFIGS = {
    "letter_english": {
        "steps": [
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-lines-within-regions-1"},
                    "generation_settings": {"batch_size": 8},
                },
            },
            {
                "step": "TextRecognition",
                "settings": {
                    "model": "TrOCR",
                    "model_settings": {"model": "microsoft/trocr-base-handwritten"},
                    "generation_settings": {"batch_size": 16},
                },
            },
            {"step": "OrderLines"},
        ]
    },
    "letter_swedish": {
        "steps": [
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-lines-within-regions-1"},
                    "generation_settings": {"batch_size": 8},
                },
            },
            {
                "step": "TextRecognition",
                "settings": {
                    "model": "TrOCR",
                    "model_settings": {"model": "Riksarkivet/trocr-base-handwritten-hist-swe-2"},
                    "generation_settings": {"batch_size": 16},
                },
            },
            {"step": "OrderLines"},
        ]
    },
    "spread_english": {
        "steps": [
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-regions-1"},
                    "generation_settings": {"batch_size": 4},
                },
            },
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-lines-within-regions-1"},
                    "generation_settings": {"batch_size": 8},
                },
            },
            {
                "step": "TextRecognition",
                "settings": {
                    "model": "TrOCR",
                    "model_settings": {"model": "microsoft/trocr-base-handwritten"},
                    "generation_settings": {"batch_size": 16},
                },
            },
            {"step": "ReadingOrderMarginalia", "settings": {"two_page": True}},
        ]
    },
    "spread_swedish": {
        "steps": [
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-regions-1"},
                    "generation_settings": {"batch_size": 4},
                },
            },
            {
                "step": "Segmentation",
                "settings": {
                    "model": "yolo",
                    "model_settings": {"model": "Riksarkivet/yolov9-lines-within-regions-1"},
                    "generation_settings": {"batch_size": 8},
                },
            },
            {
                "step": "TextRecognition",
                "settings": {
                    "model": "TrOCR",
                    "model_settings": {"model": "Riksarkivet/trocr-base-handwritten-hist-swe-2"},
                    "generation_settings": {"batch_size": 16},
                },
            },
            {"step": "ReadingOrderMarginalia", "settings": {"two_page": True}},
        ]
    },
}
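
# Illustrative sketch of a `custom_settings` payload (this exact payload is an example,
# not one of the presets above): any JSON object with the same shape as the presets is
# accepted, so a caller can, for instance, raise the recognition batch size:
#
#     {
#       "steps": [
#         {"step": "Segmentation",
#          "settings": {"model": "yolo",
#                       "model_settings": {"model": "Riksarkivet/yolov9-lines-within-regions-1"},
#                       "generation_settings": {"batch_size": 8}}},
#         {"step": "TextRecognition",
#          "settings": {"model": "TrOCR",
#                       "model_settings": {"model": "microsoft/trocr-base-handwritten"},
#                       "generation_settings": {"batch_size": 32}}},
#         {"step": "OrderLines"}
#       ]
#     }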

@spaces.GPU
def process_htr(
    image_path: str,
    document_type: Literal["letter_english", "letter_swedish", "spread_english", "spread_swedish"] = "letter_english",
    output_format: Literal["txt", "alto", "page", "json"] = DEFAULT_OUTPUT,
    custom_settings: Optional[str] = None,
):
    """Run handwritten text recognition on an image and return the extracted text
    together with an export file in the requested format.

    An optional JSON string in `custom_settings` overrides the selected pipeline preset.
    """
    if image_path is None:
        return "Error: No image provided", None

    try:
        original_filename = Path(image_path).stem or "output"
        
        if custom_settings:
            try:
                config = json.loads(custom_settings)
            except json.JSONDecodeError:
                return "Error: Invalid JSON in custom_settings parameter", None
        else:
            config = PIPELINE_CONFIGS[document_type]

        collection = Collection([image_path])
        pipeline = Pipeline.from_config(config)
        
        try:
            processed_collection = pipeline.run(collection)
        except Exception as pipeline_error:
            return f"Error: Pipeline execution failed: {str(pipeline_error)}", None

        # Export the processed collection to a temporary directory with the chosen serializer
        temp_dir = Path(tempfile.mkdtemp())
        export_dir = temp_dir / output_format
        processed_collection.save(directory=str(export_dir), serializer=output_format)
        
        # HTRflow chooses the export file name itself; rename the first exported file
        # after the original image so the returned download is recognisable.
        output_file_path = None
        for root, _, files in os.walk(export_dir):
            for file in files:
                old_path = os.path.join(root, file)
                file_ext = Path(file).suffix
                new_filename = f"{original_filename}.{output_format}" if not file_ext else f"{original_filename}{file_ext}"
                new_path = os.path.join(root, new_filename)
                os.rename(old_path, new_path)
                output_file_path = new_path
                break
        
        extracted_text = extract_text_from_collection(processed_collection)

        return extracted_text, output_file_path
        
    except Exception as e:
        return f"Error: HTR processing failed: {str(e)}", None

def extract_text_from_collection(collection: Collection) -> str:
    """Concatenate the recognised text of every text-bearing node, one line per node."""
    text_lines = []
    for page in collection.pages:
        for node in page.traverse():
            if hasattr(node, "text") and node.text:
                text_lines.append(node.text)
    return "\n".join(text_lines)

def create_htrflow_mcp_server():
    """Build the Gradio interface; launching it with mcp_server=True also exposes
    process_htr as an MCP tool."""
    demo = gr.Interface(
        fn=process_htr,
        inputs=[
            gr.Image(type="filepath", label="Upload Image or Enter URL"),
            gr.Dropdown(choices=["letter_english", "letter_swedish", "spread_english", "spread_swedish"], value="letter_english", label="Document Type"),
            gr.Dropdown(choices=CHOICES, value=DEFAULT_OUTPUT, label="Output Format"),
            gr.Textbox(label="Custom Settings (JSON)", placeholder="Optional custom pipeline settings"),
        ],
        outputs=[
            gr.Textbox(label="Extracted Text", lines=10),
            gr.File(label="Download Output File")
        ],
        title="HTRflow MCP Server",
        description="Process handwritten text from uploaded file or URL and get extracted text with output file in specified format",
        api_name="process_htr",
    )
    return demo

if __name__ == "__main__":
    demo = create_htrflow_mcp_server()
    demo.launch(mcp_server=True)
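
# Illustrative client call (a sketch under assumptions: the app is running locally on
# Gradio's default port and "page.jpg" is a placeholder file), using gradio_client:
#
#     from gradio_client import Client, handle_file
#
#     client = Client("http://127.0.0.1:7860/")
#     text, file_path = client.predict(
#         image_path=handle_file("page.jpg"),
#         document_type="letter_swedish",
#         output_format="txt",
#         custom_settings="",
#         api_name="/process_htr",
#     )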