import gradio as gr

import glob
import hashlib
import logging
import os
import shutil
import subprocess
import sys
import yaml

# Directory where converted models are written and served for download.
OUT_DIR = '/tmp'

logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    level=logging.INFO,
    datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.getLogger()

# Metadata of all supported checkpoints; each entry carries the SHA-256 digest
# used to recognize an uploaded .pth file.
known_models_yaml = None
with open('known_models.yaml', 'r') as f:
    known_models_yaml = yaml.load(f.read(), Loader=yaml.Loader)
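# Sketch of the expected known_models.yaml layout, inferred from the keys this
# script reads; the concrete values are placeholders, not a real entry:
#
# models:
#   - name: SomeModel4x                       # placeholder
#     file: SomeModel4x.pth                   # placeholder
#     sourceLink: https://example.com/model   # placeholder
#     sha256: <digest of the .pth file>
#     type: <architecture id understood by converter.py>
#     scale: 4
#     description: ...
#     source: ...
#     author: ...
#     license: ...
#     # optional keys: cuda, monochrome, features, blocks, convs, shuffle-factor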
def convert(input_model):
    # Remove outputs left over from the previous conversion so that only the
    # fresh result is offered for download.
    last_outputs = glob.glob('*.wifm', root_dir=OUT_DIR) + glob.glob('*.mlpackage', root_dir=OUT_DIR)
    for output in last_outputs:
        try:
            if os.path.isfile(OUT_DIR + '/' + output):
                os.remove(OUT_DIR + '/' + output)
            else:
                shutil.rmtree(OUT_DIR + '/' + output)
        except Exception as e:
            logger.error('Failed to remove last output file: ' + str(e))

    if input_model is None:
        return None

    file = input_model.name
    if not file.endswith('.pth'):
        raise gr.Error('Uploaded file is not a PyTorch weights (.pth) file.')
    # Identify the uploaded checkpoint by its SHA-256 digest.
    digest = None
    with open(file, 'rb') as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    # Look up the upload among the known models and convert it on a match.
    for model in known_models_yaml['models']:
        if digest != model['sha256']:
            continue
        name = model['name']
        out_file = OUT_DIR + '/' + name + '.wifm'
        logger.info('Converting model: %s', name)
        # Base invocation of the converter script.
        command = [
            'python', 'converter.py',
            '--type', model['type'],
            '--name', name,
            '--scale', str(model['scale']),
            '--out-dir', OUT_DIR,
            '--description', model['description'],
            '--source', model['source'],
            '--author', model['author'],
            '--license', model['license']
        ]
        # Optional per-model flags.
        if 'cuda' in model and model['cuda']:
            command += ['--has-cuda']
        if 'monochrome' in model and model['monochrome']:
            command += ['--monochrome']
        if 'features' in model:
            command += ['--num-features', str(model['features'])]
        if 'blocks' in model:
            command += ['--num-blocks', str(model['blocks'])]
        if 'convs' in model:
            command += ['--num-convs', str(model['convs'])]
        if 'shuffle-factor' in model:
            command += ['--shuffle-factor', str(model['shuffle-factor'])]
        command += [file]
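        # Illustration only (placeholder values): a matched entry produces a
        # command along the lines of
        #   python converter.py --type <type> --name SomeModel4x --scale 4
        #       --out-dir /tmp --description ... --source ... --author ...
        #       --license ... [optional flags] /path/to/upload.pth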
        logger.debug('Command: %s', command)
        # Run the converter and stream its stdout to the app log as it arrives.
        process = subprocess.Popen(command, stdout=subprocess.PIPE)
        for c in iter(lambda: process.stdout.read(1), b''):
            sys.stdout.buffer.write(c)
            sys.stdout.flush()
        process.communicate()
        if process.returncode != 0:
            raise gr.Error('converter.py returned non-zero exit code ' + str(process.returncode))
        if not os.path.exists(out_file):
            raise gr.Error('Conversion finished but the output file was not found.')
        return out_file

    raise gr.Error('Unknown model. If it has a supported architecture, please create an issue at https://github.com/imxieyi/waifu2x-ios-model-converter.')
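# Minimal local-test sketch (not part of the app): Gradio's 'file' input hands
# convert() an object exposing a `.name` path, so a quick manual test could
# mimic it with SimpleNamespace. The path below is hypothetical.
#
# from types import SimpleNamespace
# print(convert(SimpleNamespace(name='uploads/SomeModel4x.pth')))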
# Build a markdown table of all supported models for display under the app.
models_string = '''
|File|Name|Scale|Description|License|
|---|---|---|---|---|
'''
for model in known_models_yaml['models']:
    models_string += '|[{}]({})|{}|{}|{}|{}|\n'.format(model['file'].split('/')[-1], model['sourceLink'], model['name'], model['scale'], model['description'].replace('\n', '<br/>'), model['license'])
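# Each generated row renders roughly as (placeholder values):
# |[SomeModel4x.pth](https://example.com/model)|SomeModel4x|4|...|...|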
iface = gr.Interface(
    fn=convert,
    inputs='file',
    outputs='file',
    title='Web waifu2x-ios Model Converter',
    description='''
Please upload the `.pth` model file on the `input_model` panel. After uploading, please wait until the output `.wifm` model file appears on the `output` panel, then click `Download` to save the converted custom model.

Only the models listed below the converter are supported. If you want another model added, please create an issue [here](https://github.com/imxieyi/waifu2x-ios-model-converter/issues) or report it via app feedback.
''',
    article='''
Supported models (from the [upscale.wiki Model Database](https://upscale.wiki/wiki/Model_Database)):
{}

Project: https://github.com/imxieyi/waifu2x-ios-model-converter
Report issues: https://github.com/imxieyi/waifu2x-ios-model-converter/issues
'''.format(models_string),
    live=True,
)
iface.launch()