import gradio as gr
import torch
import uuid
import time
import threading
from datetime import datetime
from transformers.models.bert import BertForSequenceClassification, BertTokenizer
from transformers.models.roberta import RobertaForSequenceClassification, RobertaTokenizer
# Task state management
task_results = {}
task_lock = threading.Lock()

# torch.set_grad_enabled(False)

print('Loading Models from HuggingFace...')

# Load V3 models only
name_en = "yuchuantian/AIGC_detector_env3"
model_en = RobertaForSequenceClassification.from_pretrained(name_en)
tokenizer_en = RobertaTokenizer.from_pretrained(name_en)
name_en3 = "yuchuantian/AIGC_detector_env3short"
model_en3 = RobertaForSequenceClassification.from_pretrained(name_en3)
name_zh = "yuchuantian/AIGC_detector_zhv3"
model_zh = BertForSequenceClassification.from_pretrained(name_zh)
tokenizer_zh = BertTokenizer.from_pretrained(name_zh)
name_zh4 = "yuchuantian/AIGC_detector_zhv3short"
model_zh4 = BertForSequenceClassification.from_pretrained(name_zh4)
print('Model Loading from HuggingFace Complete!')
def predict_func(text: str, tokenizer, model):
    """Run one forward pass; return {"label": argmax class id, "score": its probability}."""
    with torch.no_grad():
        inputs = tokenizer(text, return_tensors='pt', max_length=512, truncation=True)
        outputs = model(**inputs)
        scores = outputs.logits[0].softmax(0).numpy()
    result = {"label": scores.argmax().item(), "score": scores.max().item()}
    return result
def predict_en(text):
    id2label = ['Human', 'AI']
    res = predict_func(text, tokenizer_en, model_en)
    return id2label[res['label']], res['score']

def predict_en3(text):
    id2label = ['Human', 'AI']
    res = predict_func(text, tokenizer_en, model_en3)
    return id2label[res['label']], res['score']

def predict_zh(text):
    id2label = ['人类', 'AI']
    res = predict_func(text, tokenizer_zh, model_zh)
    return id2label[res['label']], res['score']

def predict_zh4(text):
    id2label = ['人类', 'AI']
    res = predict_func(text, tokenizer_zh, model_zh4)
    return id2label[res['label']], res['score']
# Asynchronous task handler
def process_task_async(task_id, text, model_type):
    """Process a submitted detection task in a background thread."""
    try:
        with task_lock:
            task_results[task_id] = {
                "status": "processing",
                "created_at": datetime.now().isoformat(),
                "result": None,
                "error": None
            }
        # Select the prediction function that matches the requested model type
        if model_type == "en_v3":
            result = predict_en(text)
        elif model_type == "en_v3_short":
            result = predict_en3(text)
        elif model_type == "zh_v3":
            result = predict_zh(text)
        elif model_type == "zh_v3_short":
            result = predict_zh4(text)
        else:
            raise ValueError(f"Unknown model type: {model_type}")
        with task_lock:
            task_results[task_id]["status"] = "completed"
            task_results[task_id]["result"] = {
                "label": result[0],
                "score": float(result[1])
            }
            task_results[task_id]["completed_at"] = datetime.now().isoformat()
    except Exception as e:
        with task_lock:
            task_results[task_id]["status"] = "error"
            task_results[task_id]["error"] = str(e)
            task_results[task_id]["completed_at"] = datetime.now().isoformat()
def submit_task(text, model_type):
    """Submit an asynchronous detection task and return its task_id immediately."""
    task_id = str(uuid.uuid4())
    # Start a background thread to process the task
    thread = threading.Thread(
        target=process_task_async,
        args=(task_id, text, model_type)
    )
    thread.daemon = True
    thread.start()
    return {
        "task_id": task_id,
        "status": "submitted",
        "message": "Task submitted successfully"
    }
def query_task_result(task_id):
    """Look up the status and result of a previously submitted task by its id."""
    with task_lock:
        if task_id not in task_results:
            return {
                "error": "Task not found",
                "task_id": task_id
            }
        task_info = task_results[task_id].copy()
        task_info["task_id"] = task_id
        return task_info
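# A minimal sketch of the intended submit-then-poll flow (kept as comments so it does
# not run at app startup; the 0.5 s polling interval is an arbitrary illustrative choice):
#
#   ticket = submit_task("很高兴认识你!", "zh_v3")
#   while query_task_result(ticket["task_id"]).get("status") in (None, "processing"):
#       time.sleep(0.5)  # the entry may not exist yet right after submission
#   print(query_task_result(ticket["task_id"]))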
# Quick sanity checks after model loading
print(predict_en('Peking University is one of the best universities in the world.'))
print(predict_zh('很高兴认识你!'))
with gr.Blocks() as demo:
    gr.Markdown("""
## AIGC Detector 大模型AI文本检测器

**News**

6/25/2025: The detectors are updated! Latest LLMs and reasoning models are now covered! 🔥🔥🔥

3/25/2025: Our AIGC Detector demo is available! 🔥🔥🔥

This app is a demo of our AIGC Detector. If you are interested in our project, please stay tuned to our [GitHub](https://github.com/YuchuanTian/AIGC_text_detector)!

本app是我们AIGC检测器工作的DEMO。如果您对我们的工作感兴趣,欢迎在[Github主页](https://github.com/YuchuanTian/AIGC_text_detector)上持续关注我们的工作!

[Paper Link 论文链接](https://arxiv.org/abs/2305.18149)

The loadable versions are as follows 可加载的检测器版本如下:

English: [En-v3](https://huggingface.co/yuchuantian/AIGC_detector_env3) / [En-v3-short](https://huggingface.co/yuchuantian/AIGC_detector_env3short)

Chinese: [Zh-v3](https://huggingface.co/yuchuantian/AIGC_detector_zhv3) / [Zh-v3-short](https://huggingface.co/yuchuantian/AIGC_detector_zhv3short)

**Acknowledgement 致谢**

We sincerely thank [Hello-SimpleAI](https://huggingface.co/spaces/Hello-SimpleAI/chatgpt-detector-single) for their code.
""")
| with gr.Tab("异步API接口"): | |
| gr.Markdown(""" | |
| ## 异步API接口使用说明 | |
| ### 1. 提交任务接口 | |
| - 函数: `submit_task(text, model_type)` | |
| - 参数: | |
| - text: 要检测的文本 | |
| - model_type: 模型类型 (en_v3, en_v3_short, zh_v3, zh_v3_short) | |
| - 返回: task_id 和状态信息 | |
| ### 2. 查询结果接口 | |
| - 函数: `query_task_result(task_id)` | |
| - 参数: task_id (任务ID) | |
| - 返回: 任务状态和结果 | |
| """) | |
        with gr.Row():
            with gr.Column():
                api_text = gr.Textbox(lines=5, label='文本内容', value="北京大学建立于1898年7月3日")
                api_model = gr.Dropdown(
                    choices=["en_v3", "en_v3_short", "zh_v3", "zh_v3_short"],
                    value="zh_v3",
                    label="模型类型"
                )
                submit_btn = gr.Button("📤 提交任务")
            with gr.Column():
                task_id_input = gr.Textbox(label="任务ID", placeholder="输入任务ID查询结果")
                query_btn = gr.Button("🔍 查询结果")
        with gr.Row():
            submit_output = gr.JSON(label="提交结果")
            query_output = gr.JSON(label="查询结果")
| with gr.Tab("中文-V3"): | |
| gr.Markdown(""" | |
| 注意: 本检测器提供的结果仅供参考,应谨慎作为事实依据。 | |
| """) | |
| t2 = gr.Textbox(lines=5, label='文本',value="北京大学建立于1898年7月3日,初名京师大学堂,辛亥革命后于1912年改为北京大学。1938年更名为国立西南联合大学。1946年10月在北平复员。1952年成为以文理学科为主的综合性大学。") | |
| button2 = gr.Button("🚀 检测!") | |
| label2 = gr.Textbox(lines=1, label='预测结果') | |
| score2 = gr.Textbox(lines=1, label='模型概率') | |
| with gr.Tab("中文-V3-短文本"): | |
| gr.Markdown(""" | |
| 注意: 本检测器提供的结果仅供参考,应谨慎作为事实依据。 | |
| """) | |
| t4 = gr.Textbox(lines=5, label='文本',value="北京大学建立于1898年7月3日,初名京师大学堂,辛亥革命后于1912年改为北京大学。1938年更名为国立西南联合大学。1946年10月在北平复员。1952年成为以文理学科为主的综合性大学。") | |
| button4 = gr.Button("🚀 检测!") | |
| label4 = gr.Textbox(lines=1, label='预测结果') | |
| score4 = gr.Textbox(lines=1, label='模型概率') | |
| with gr.Tab("English-V3"): | |
| gr.Markdown(""" | |
| Note: The results are for reference only; they could not be used as factual evidence. | |
| """) | |
| t1 = gr.Textbox(lines=5, label='Text',value="Originated as the Imperial University of Peking in 1898, Peking University was China's first national comprehensive university and the supreme education authority at the time. Since the founding of the People's Republic of China in 1949, it has developed into a comprehensive university with fundamental education and research in both humanities and science. The reform and opening-up of China in 1978 has ushered in a new era for the University unseen in history.") | |
| button1 = gr.Button("🚀 Predict!") | |
| label1 = gr.Textbox(lines=1, label='Predicted Label') | |
| score1 = gr.Textbox(lines=1, label='Probability') | |
| with gr.Tab("English-V3-Short"): | |
| gr.Markdown(""" | |
| Note: The results are for reference only; they could not be used as factual evidence. | |
| """) | |
| t3 = gr.Textbox(lines=5, label='Text',value="Originated as the Imperial University of Peking in 1898, Peking University was China's first national comprehensive university and the supreme education authority at the time. Since the founding of the People's Republic of China in 1949, it has developed into a comprehensive university with fundamental education and research in both humanities and science. The reform and opening-up of China in 1978 has ushered in a new era for the University unseen in history.") | |
| button3 = gr.Button("🚀 Predict!") | |
| label3 = gr.Textbox(lines=1, label='Predicted Label') | |
| score3 = gr.Textbox(lines=1, label='Probability') | |
    # Wire up events
    submit_btn.click(submit_task, inputs=[api_text, api_model], outputs=[submit_output])
    query_btn.click(query_task_result, inputs=[task_id_input], outputs=[query_output])
    button1.click(predict_en, inputs=[t1], outputs=[label1, score1])
    button2.click(predict_zh, inputs=[t2], outputs=[label2, score2])
    button3.click(predict_en3, inputs=[t3], outputs=[label3, score3])
    button4.click(predict_zh4, inputs=[t4], outputs=[label4, score4])
    # Page Count
    gr.Markdown("""
<center><a href='https://clustrmaps.com/site/1bsdc' title='Visit tracker'><img src='//clustrmaps.com/map_v2.png?cl=080808&w=a&t=tt&d=NXQdnwxvIm27veMbB5F7oHNID09nhSvkBRZ_Aji9eIA&co=ffffff&ct=808080'/></a></center>
""")
# Enable queue mode to support real asynchronous processing
demo.queue()
demo.launch()