Spaces:
Sleeping
Sleeping
import gradio as gr
import os

# Optional ModelScope token and mandatory Hugging Face token, both supplied
# via Space secrets.  Empty string means "not configured".
MS_TOKEN = os.environ.get('MS_TOKEN', '')
HF_TOKEN = os.environ.get('HF_TOKEN', '')
def get_cache_dir():
    """Return a random name for a temporary download/cache folder.

    The original implementation pulled a dictionary word from the
    third-party ``random_word`` package; two runs could pick the same word
    and collide on disk.  A UUID4 hex string is stdlib-only and
    collision-resistant, while still being just a plain folder-name string.
    """
    import uuid
    return uuid.uuid4().hex
def check_disk():
    """Return a human-readable disk-usage summary for the root filesystem.

    Bug fix: the previous body ran ``os.system("df -h /")`` and returned its
    *exit status* (normally ``0``) — the actual report only went to the
    container's stdout, so the text shown to the user was just ``0``.
    ``shutil.disk_usage`` gives us the numbers directly.
    """
    import shutil
    total, used, free = shutil.disk_usage('/')
    gib = 1 << 30  # bytes per GiB
    return (f'Disk usage for /: total {total / gib:.1f} GiB, '
            f'used {used / gib:.1f} GiB, free {free / gib:.1f} GiB')
def pull_from_ms(repo_id, cache_dir, filename=''):
    """Download a ModelScope repo (or one file of it) into *cache_dir*.

    With a non-empty *filename* only that file is fetched; otherwise the
    whole repo snapshot is downloaded.  Returns a status message.
    """
    from modelscope import HubApi
    from modelscope import snapshot_download
    from modelscope.hub.file_download import model_file_download

    # Authenticate only when a non-blank token was configured.
    if MS_TOKEN and MS_TOKEN.strip():
        hub = HubApi()
        hub.login(MS_TOKEN)

    if filename:
        # Single-file download.
        model_path = model_file_download(model_id=repo_id,
                                         file_path=filename,
                                         cache_dir=cache_dir)
    else:
        # Full repository snapshot.
        model_path = snapshot_download(repo_id, cache_dir=cache_dir)

    return f'Pulled {repo_id} to temp folder {cache_dir}: {model_path}'
def remove_file(repo_id, cache_dir, filename):
    """Best-effort removal of *filename* from the downloaded repo copy.

    Bug fixes: the ``filename`` parameter was previously ignored (the path
    was hardcoded) and a bare ``except:`` swallowed every error, including
    ``KeyboardInterrupt``.  Returns a short status message; an empty string
    means nothing was removed.
    """
    import os
    try:
        os.remove(f'{cache_dir}/{repo_id}/{filename}')
    except OSError:
        # Missing file (or a permission problem) is non-fatal cleanup noise.
        return ''
    return f'{filename} file removed'
def push_to_hf(cache_dir, ms_repo_id, hf_repo_id):
    """Upload the folder pulled from ModelScope to the HF repo *hf_repo_id*.

    Raises ``gr.Error`` when no HF token is configured.  Returns a status
    message.  (The previous body bound ``upload_folder``'s return value to
    an unused local; it is now discarded explicitly.)
    """
    from huggingface_hub import HfApi
    if not HF_TOKEN:
        raise gr.Error("Please enter your HF_TOKEN")
    api = HfApi(token=HF_TOKEN)  # Token is not persisted on the machine.
    api.upload_folder(
        folder_path=f"{cache_dir}/{ms_repo_id}",
        repo_id=hf_repo_id,
        repo_type="model",
    )
    return f'Pushed to {hf_repo_id}'
def handle(ms_repo_id, hf_repo_id):
    """Run the pull-clean-push pipeline and collect per-stage output.

    Returns a ``(results, errors)`` pair of newline-joined strings; the
    pipeline stops at the first stage that raises.
    """
    cache_dir = get_cache_dir()
    stages = [
        (check_disk, (), {}),
        # # Run all the sanity checks on README.md
        # (pull_from_ms, (ms_repo_id, cache_dir, 'README.md'), {}),
        # (push_to_hf, (cache_dir, ms_repo_id, hf_repo_id), {}),
        # Push other files
        (pull_from_ms, (ms_repo_id, cache_dir), {}),
        (remove_file, (ms_repo_id, cache_dir, 'README.md'), {}),
        (check_disk, (), {}),
        (push_to_hf, (cache_dir, ms_repo_id, hf_repo_id), {}),
        (check_disk, (), {}),
    ]

    results, errors = [], []
    for stage, stage_args, stage_kwargs in stages:
        try:
            results.append(str(stage(*stage_args, **stage_kwargs)))
        except Exception as exc:
            # Abort the pipeline on the first failing stage.
            errors.append(str(exc))
            break

    return '\n\n'.join(results), '\n\n'.join(errors)
# Gradio UI: two text inputs (source MS repo, target HF repo), a submit
# button wired to handle(), and separate output/error boxes.
with gr.Blocks() as demo:
    # Fixed typos in the user-facing instructions ("and providing" ->
    # "and provide", "doesn't do create" -> "doesn't create").
    gr.Markdown('''
This space uploads model from ModelScope to Huggingface.
**Please make sure that you're the owner of the repo or have permission from the owner to do so!**
# How to use this Space?
- **Duplicate this Space and provide MS token (optional) and your read/write HF token (mandatory)**
- Create your target model repo on HF. This step needs to be done manually. The Space doesn't create an empty repo for you.
- In your own private Space, fill in information below.
- Click submit then watch for output in container log for progress.
- Create README.md file (since the metadata is not compatible with HF)
''')
    ms_repo_id = gr.Textbox(label="Model Scope Repo ID (case sensitive)")
    hf_repo_id = gr.Textbox(label="Target HF Model Repo ID (case sensitive). \nPlease make sure that this model has already been created")
    with gr.Row():
        button = gr.Button("Submit", variant="primary")
        clear = gr.Button("Clear")
    error = gr.Textbox(label="Error")
    output = gr.Textbox(label="Output")
    button.click(handle, [ms_repo_id, hf_repo_id], [output, error])
if __name__ == "__main__":
    # debug=True surfaces tracebacks in the container log.
    demo.launch(debug=True)