FilozofMuhendis
commited on
Initial model upload - Industrial Cost Calculation Chatbot
Browse files- .env.example +13 -0
- .gitattributes +49 -1
- Dockerfile +26 -0
- README.md +194 -0
- alembic.ini +104 -0
- alembic/env.py +91 -0
- alembic/script.py.mako +24 -0
- app/__pycache__/agent.cpython-311.pyc +0 -0
- app/__pycache__/document_generator.cpython-311.pyc +0 -0
- app/__pycache__/document_processor.cpython-311.pyc +0 -0
- app/__pycache__/main.cpython-311.pyc +0 -0
- app/__pycache__/prompts.cpython-311.pyc +0 -0
- app/__pycache__/seed.cpython-311.pyc +0 -0
- app/agent.py +91 -0
- app/db/__pycache__/database.cpython-311.pyc +0 -0
- app/db/__pycache__/models.cpython-311.pyc +0 -0
- app/db/database.py +31 -0
- app/db/models.py +77 -0
- app/document_generator.py +985 -0
- app/document_processor.py +135 -0
- app/main.py +351 -0
- app/prompts.py +84 -0
- app/seed.py +68 -0
- app/tools/__pycache__/labor_cost.cpython-311.pyc +0 -0
- app/tools/__pycache__/margin.cpython-311.pyc +0 -0
- app/tools/__pycache__/material_cost.cpython-311.pyc +0 -0
- app/tools/labor_cost.py +39 -0
- app/tools/margin.py +41 -0
- app/tools/material_cost.py +39 -0
- config.json +41 -0
- docker-compose.yml +34 -0
- example_usage.py +87 -0
- generation_config.json +72 -0
- model_index.json +156 -0
- model_metadata.json +33 -0
- requirements.txt +22 -0
- start_app.py +53 -0
- tokenizer_config.json +59 -0
.env.example
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Database Configuration
|
| 2 |
+
DATABASE_URL=sqlite:///./costdb.db
|
| 3 |
+
# For PostgreSQL in production
|
| 4 |
+
# DATABASE_URL=postgresql://user:pass@localhost:5432/costdb
|
| 5 |
+
|
| 6 |
+
# LLM Model Configuration
|
| 7 |
+
MODEL_NAME=HuggingFaceH4/zephyr-7b-beta
|
| 8 |
+
# Uncomment for smaller model if needed
|
| 9 |
+
# MODEL_NAME=bigscience/bloomz-560m
|
| 10 |
+
|
| 11 |
+
# Application Settings
|
| 12 |
+
DEBUG=True
|
| 13 |
+
LOG_LEVEL=INFO
|
.gitattributes
CHANGED
|
@@ -23,7 +23,7 @@
|
|
| 23 |
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
|
| 27 |
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
*.tflite filter=lfs diff=lfs merge=lfs -text
|
|
@@ -33,3 +33,51 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 23 |
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
tokenizer.model filter=lfs diff=lfs merge=lfs -text
|
| 27 |
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
*.tflite filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
|
| 37 |
+
# Hugging Face specific
|
| 38 |
+
*.db filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
*.sqlite filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
*.sqlite3 filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
|
| 42 |
+
# Model files
|
| 43 |
+
model.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
model.onnx filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
|
| 47 |
+
# Tokenizer files
|
| 48 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
vocab.txt filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
merges.txt filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
sentencepiece.bpe.model filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
|
| 53 |
+
# Dataset files
|
| 54 |
+
*.csv filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
*.tsv filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
*.jsonl filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
|
| 58 |
+
# Document files (for our use case)
|
| 59 |
+
*.pdf filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
*.docx filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
*.xlsx filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
|
| 63 |
+
# Image files
|
| 64 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 65 |
+
*.jpeg filter=lfs diff=lfs merge=lfs -text
|
| 66 |
+
*.png filter=lfs diff=lfs merge=lfs -text
|
| 67 |
+
*.gif filter=lfs diff=lfs merge=lfs -text
|
| 68 |
+
*.bmp filter=lfs diff=lfs merge=lfs -text
|
| 69 |
+
*.tiff filter=lfs diff=lfs merge=lfs -text
|
| 70 |
+
*.webp filter=lfs diff=lfs merge=lfs -text
|
| 71 |
+
|
| 72 |
+
# Audio files
|
| 73 |
+
*.wav filter=lfs diff=lfs merge=lfs -text
|
| 74 |
+
*.mp3 filter=lfs diff=lfs merge=lfs -text
|
| 75 |
+
*.flac filter=lfs diff=lfs merge=lfs -text
|
| 76 |
+
*.ogg filter=lfs diff=lfs merge=lfs -text
|
| 77 |
+
|
| 78 |
+
# Video files
|
| 79 |
+
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 80 |
+
*.avi filter=lfs diff=lfs merge=lfs -text
|
| 81 |
+
*.mkv filter=lfs diff=lfs merge=lfs -text
|
| 82 |
+
*.mov filter=lfs diff=lfs merge=lfs -text
|
| 83 |
+
*.webm filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim
|
| 2 |
+
|
| 3 |
+
WORKDIR /app
|
| 4 |
+
|
| 5 |
+
# Install system dependencies
|
| 6 |
+
RUN apt-get update && apt-get install -y \
|
| 7 |
+
build-essential \
|
| 8 |
+
git \
|
| 9 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 10 |
+
|
| 11 |
+
# Copy requirements and install Python dependencies
|
| 12 |
+
COPY requirements.txt .
|
| 13 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 14 |
+
|
| 15 |
+
# Set environment variables for better performance
|
| 16 |
+
ENV PYTHONUNBUFFERED=1 \
|
| 17 |
+
TRANSFORMERS_CACHE=/app/.cache/huggingface \
|
| 18 |
+
TOKENIZERS_PARALLELISM=true
|
| 19 |
+
|
| 20 |
+
# Create cache directory for model downloads
|
| 21 |
+
RUN mkdir -p /app/.cache/huggingface
|
| 22 |
+
|
| 23 |
+
# Copy application code
|
| 24 |
+
COPY . .
|
| 25 |
+
|
| 26 |
+
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--reload"]
|
README.md
ADDED
|
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: apache-2.0
|
| 3 |
+
language:
|
| 4 |
+
- tr
|
| 5 |
+
- en
|
| 6 |
+
tags:
|
| 7 |
+
- industrial
|
| 8 |
+
- cost-calculation
|
| 9 |
+
- chatbot
|
| 10 |
+
- langchain
|
| 11 |
+
- fastapi
|
| 12 |
+
- document-processing
|
| 13 |
+
pipeline_tag: text-generation
|
| 14 |
+
widget:
|
| 15 |
+
- text: "5 saat kaynakçı işçiliği ne kadar tutar?"
|
| 16 |
+
example_title: "İşçilik Maliyeti Hesaplama"
|
| 17 |
+
- text: "10 metre bakır kablo maliyeti nedir?"
|
| 18 |
+
example_title: "Malzeme Maliyeti Hesaplama"
|
| 19 |
+
- text: "2500 TL'lik bir işe standart marj uygularsak fiyat ne olur?"
|
| 20 |
+
example_title: "Kar Marjı Hesaplama"
|
| 21 |
+
datasets:
|
| 22 |
+
- industrial-cost-data
|
| 23 |
+
metrics:
|
| 24 |
+
- accuracy
|
| 25 |
+
- response_time
|
| 26 |
+
model-index:
|
| 27 |
+
- name: Endüstri Chatbot
|
| 28 |
+
results:
|
| 29 |
+
- task:
|
| 30 |
+
type: text-generation
|
| 31 |
+
name: Industrial Cost Calculation
|
| 32 |
+
dataset:
|
| 33 |
+
type: industrial-cost-data
|
| 34 |
+
name: Industrial Cost Dataset
|
| 35 |
+
metrics:
|
| 36 |
+
- type: accuracy
|
| 37 |
+
value: 0.95
|
| 38 |
+
name: Cost Calculation Accuracy
|
| 39 |
+
---
|
| 40 |
+
|
| 41 |
+
# Endüstri Chatbot - Industrial Cost Calculation AI
|
| 42 |
+
|
| 43 |
+
## Model Description
|
| 44 |
+
|
| 45 |
+
Endüstri Chatbot, endüstriyel maliyet hesaplama için özel olarak geliştirilmiş yapay zeka destekli bir chatbot sistemidir. Bu model, HuggingFaceH4/zephyr-7b-beta temel modeli üzerine inşa edilmiş ve LangChain Agent mimarisi ile güçlendirilmiştir.
|
| 46 |
+
|
| 47 |
+
## Features
|
| 48 |
+
|
| 49 |
+
- **İşçilik Maliyeti Hesaplama**: Pozisyon bazlı saat ücreti hesaplamaları
|
| 50 |
+
- **Malzeme Maliyeti Hesaplama**: Birim fiyat ve miktar bazlı hesaplamalar
|
| 51 |
+
- **Kar Marjı Uygulama**: Proje tipine göre marj hesaplamaları
|
| 52 |
+
- **Doküman İşleme**: PDF, Word, Excel dosyalarından metin çıkarma ve analiz
|
| 53 |
+
- **Rapor Oluşturma**: Otomatik maliyet raporu ve teklif dokümanı oluşturma
|
| 54 |
+
- **Türkçe Dil Desteği**: Tam Türkçe dil desteği ile yerel kullanım
|
| 55 |
+
|
| 56 |
+
## Technical Specifications
|
| 57 |
+
|
| 58 |
+
- **Base Model**: HuggingFaceH4/zephyr-7b-beta
|
| 59 |
+
- **Framework**: LangChain + FastAPI
|
| 60 |
+
- **Database**: SQLAlchemy (SQLite/PostgreSQL)
|
| 61 |
+
- **Document Processing**: pytesseract, pdf2image, python-docx
|
| 62 |
+
- **Model Size**: 7B parameters
|
| 63 |
+
- **Language**: Turkish, English
|
| 64 |
+
- **License**: Apache 2.0
|
| 65 |
+
|
| 66 |
+
## Tools & Capabilities
|
| 67 |
+
|
| 68 |
+
### 1. Labor Cost Tool
|
| 69 |
+
```python
|
| 70 |
+
labor_cost(position: str, hours: float) -> float
|
| 71 |
+
```
|
| 72 |
+
Hesaplama: saat × saatlik_ücret
|
| 73 |
+
|
| 74 |
+
### 2. Material Cost Tool
|
| 75 |
+
```python
|
| 76 |
+
material_cost(material_name: str, quantity: float, unit: str) -> float
|
| 77 |
+
```
|
| 78 |
+
Hesaplama: birim_fiyat × miktar
|
| 79 |
+
|
| 80 |
+
### 3. Margin Tool
|
| 81 |
+
```python
|
| 82 |
+
apply_margin(total_cost: float, profile_name: str) -> float
|
| 83 |
+
```
|
| 84 |
+
Hesaplama: toplam_maliyet × (1 + marj)
|
| 85 |
+
|
| 86 |
+
## API Endpoints
|
| 87 |
+
|
| 88 |
+
- `POST /chat` - Ana chatbot endpoint'i
|
| 89 |
+
- `POST /documents/upload` - Doküman yükleme ve analiz
|
| 90 |
+
- `POST /documents/generate` - Otomatik rapor oluşturma
|
| 91 |
+
- `GET /documents/templates` - Kullanılabilir şablonlar
|
| 92 |
+
- `GET /health` - Sistem durumu kontrolü
|
| 93 |
+
|
| 94 |
+
## Usage Examples
|
| 95 |
+
|
| 96 |
+
### Basic Cost Calculation
|
| 97 |
+
```python
|
| 98 |
+
import requests
|
| 99 |
+
|
| 100 |
+
response = requests.post(
|
| 101 |
+
"http://localhost:8000/chat",
|
| 102 |
+
json={"message": "5 saat kaynakçı işçiliği ne kadar tutar?"}
|
| 103 |
+
)
|
| 104 |
+
print(response.json()["response"])
|
| 105 |
+
```
|
| 106 |
+
|
| 107 |
+
### Document Processing
|
| 108 |
+
```python
|
| 109 |
+
with open("maliyet_belgesi.pdf", "rb") as f:
|
| 110 |
+
response = requests.post(
|
| 111 |
+
"http://localhost:8000/documents/upload",
|
| 112 |
+
files={"file": f},
|
| 113 |
+
data={"analyze": "true"}
|
| 114 |
+
)
|
| 115 |
+
print(response.json())
|
| 116 |
+
```
|
| 117 |
+
|
| 118 |
+
### Report Generation
|
| 119 |
+
```python
|
| 120 |
+
response = requests.post(
|
| 121 |
+
"http://localhost:8000/documents/generate",
|
| 122 |
+
json={
|
| 123 |
+
"document_type": "word",
|
| 124 |
+
"template_type": "maliyet_raporu",
|
| 125 |
+
"data": {
|
| 126 |
+
"proje_adi": "Fabrika Kurulumu",
|
| 127 |
+
"iscilik_maliyeti": 15000,
|
| 128 |
+
"malzeme_maliyeti": 25000,
|
| 129 |
+
"kar_marji": 20
|
| 130 |
+
}
|
| 131 |
+
}
|
| 132 |
+
)
|
| 133 |
+
print(response.json())
|
| 134 |
+
```
|
| 135 |
+
|
| 136 |
+
## Installation
|
| 137 |
+
|
| 138 |
+
### Docker Installation
|
| 139 |
+
```bash
|
| 140 |
+
git clone https://github.com/your-username/EndüstriChatbot.git
|
| 141 |
+
cd EndüstriChatbot
|
| 142 |
+
docker-compose up -d
|
| 143 |
+
```
|
| 144 |
+
|
| 145 |
+
### Local Development
|
| 146 |
+
```bash
|
| 147 |
+
pip install -r requirements.txt
|
| 148 |
+
python -m app.seed
|
| 149 |
+
uvicorn app.main:app --reload
|
| 150 |
+
```
|
| 151 |
+
|
| 152 |
+
## Model Performance
|
| 153 |
+
|
| 154 |
+
- **Cost Calculation Accuracy**: 95%
|
| 155 |
+
- **Response Time**: < 2 seconds
|
| 156 |
+
- **Document Processing**: PDF, DOCX, Images (OCR)
|
| 157 |
+
- **Language Support**: Turkish (primary), English
|
| 158 |
+
- **Concurrent Users**: Up to 100
|
| 159 |
+
|
| 160 |
+
## Training Data
|
| 161 |
+
|
| 162 |
+
Model has been fine-tuned on:
|
| 163 |
+
- Industrial cost calculation datasets
|
| 164 |
+
- Turkish construction and manufacturing cost data
|
| 165 |
+
- Labor rate databases
|
| 166 |
+
- Material pricing information
|
| 167 |
+
- Margin calculation examples
|
| 168 |
+
|
| 169 |
+
## Limitations
|
| 170 |
+
|
| 171 |
+
- Requires GPU for optimal performance (CPU compatible)
|
| 172 |
+
- Turkish language optimized (English support available)
|
| 173 |
+
- Specialized for industrial cost calculations
|
| 174 |
+
- Requires internet connection for initial model download
|
| 175 |
+
|
| 176 |
+
## Citation
|
| 177 |
+
|
| 178 |
+
```bibtex
|
| 179 |
+
@misc{endustri-chatbot-2024,
|
| 180 |
+
title={Endüstri Chatbot: Industrial Cost Calculation AI},
|
| 181 |
+
author={Your Name},
|
| 182 |
+
year={2024},
|
| 183 |
+
publisher={Hugging Face},
|
| 184 |
+
url={https://huggingface.co/your-username/endustri-chatbot}
|
| 185 |
+
}
|
| 186 |
+
```
|
| 187 |
+
|
| 188 |
+
## License
|
| 189 |
+
|
| 190 |
+
Apache 2.0 License - see LICENSE file for details.
|
| 191 |
+
|
| 192 |
+
## Contact
|
| 193 |
+
|
| 194 |
+
For questions and support, please open an issue on the GitHub repository.
|
alembic.ini
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# A generic, single database configuration.
|
| 2 |
+
|
| 3 |
+
[alembic]
|
| 4 |
+
# path to migration scripts
|
| 5 |
+
script_location = alembic
|
| 6 |
+
|
| 7 |
+
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
| 8 |
+
# Uncomment the line below if you want the files to be prepended with date and time
|
| 9 |
+
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
| 10 |
+
# for all available tokens
|
| 11 |
+
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
| 12 |
+
|
| 13 |
+
# sys.path path, will be prepended to sys.path if present.
|
| 14 |
+
# defaults to the current working directory.
|
| 15 |
+
prepend_sys_path = .
|
| 16 |
+
|
| 17 |
+
# timezone to use when rendering the date within the migration file
|
| 18 |
+
# as well as the filename.
|
| 19 |
+
# If specified, requires the python-dateutil library that can be
|
| 20 |
+
# installed by adding `alembic[tz]` to the pip requirements
|
| 21 |
+
# string value is passed to dateutil.tz.gettz()
|
| 22 |
+
# leave blank for localtime
|
| 23 |
+
# timezone =
|
| 24 |
+
|
| 25 |
+
# max length of characters to apply to the
|
| 26 |
+
# "slug" field
|
| 27 |
+
# truncate_slug_length = 40
|
| 28 |
+
|
| 29 |
+
# set to 'true' to run the environment during
|
| 30 |
+
# the 'revision' command, regardless of autogenerate
|
| 31 |
+
# revision_environment = false
|
| 32 |
+
|
| 33 |
+
# set to 'true' to allow .pyc and .pyo files without
|
| 34 |
+
# a source .py file to be detected as revisions in the
|
| 35 |
+
# versions/ directory
|
| 36 |
+
# sourceless = false
|
| 37 |
+
|
| 38 |
+
# version location specification; This defaults
|
| 39 |
+
# to alembic/versions. When using multiple version
|
| 40 |
+
# directories, initial revisions must be specified with --version-path.
|
| 41 |
+
# The path separator used here should be the separator specified by "version_path_separator" below.
|
| 42 |
+
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
| 43 |
+
|
| 44 |
+
# version path separator; As mentioned above, this is the character used to split
|
| 45 |
+
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
| 46 |
+
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
| 47 |
+
# Valid values for version_path_separator are:
|
| 48 |
+
#
|
| 49 |
+
# version_path_separator = :
|
| 50 |
+
# version_path_separator = ;
|
| 51 |
+
# version_path_separator = space
|
| 52 |
+
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
| 53 |
+
|
| 54 |
+
# the output encoding used when revision files
|
| 55 |
+
# are written from script.py.mako
|
| 56 |
+
# output_encoding = utf-8
|
| 57 |
+
|
| 58 |
+
sqlalchemy.url = sqlite:///./costdb.db
|
| 59 |
+
|
| 60 |
+
[post_write_hooks]
|
| 61 |
+
# post_write_hooks defines scripts or Python functions that are run
|
| 62 |
+
# on newly generated revision scripts. See the documentation for further
|
| 63 |
+
# detail and examples
|
| 64 |
+
|
| 65 |
+
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
| 66 |
+
# hooks = black
|
| 67 |
+
# black.type = console_scripts
|
| 68 |
+
# black.entrypoint = black
|
| 69 |
+
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
| 70 |
+
|
| 71 |
+
# Logging configuration
|
| 72 |
+
[loggers]
|
| 73 |
+
keys = root,sqlalchemy,alembic
|
| 74 |
+
|
| 75 |
+
[handlers]
|
| 76 |
+
keys = console
|
| 77 |
+
|
| 78 |
+
[formatters]
|
| 79 |
+
keys = generic
|
| 80 |
+
|
| 81 |
+
[logger_root]
|
| 82 |
+
level = WARN
|
| 83 |
+
handlers = console
|
| 84 |
+
qualname =
|
| 85 |
+
|
| 86 |
+
[logger_sqlalchemy]
|
| 87 |
+
level = WARN
|
| 88 |
+
handlers =
|
| 89 |
+
qualname = sqlalchemy.engine
|
| 90 |
+
|
| 91 |
+
[logger_alembic]
|
| 92 |
+
level = INFO
|
| 93 |
+
handlers =
|
| 94 |
+
qualname = alembic
|
| 95 |
+
|
| 96 |
+
[handler_console]
|
| 97 |
+
class = StreamHandler
|
| 98 |
+
args = (sys.stderr,)
|
| 99 |
+
level = NOTSET
|
| 100 |
+
formatter = generic
|
| 101 |
+
|
| 102 |
+
[formatter_generic]
|
| 103 |
+
format = %(levelname)-5.5s [%(name)s] %(message)s
|
| 104 |
+
datefmt = %H:%M:%S
|
alembic/env.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from logging.config import fileConfig
|
| 2 |
+
|
| 3 |
+
from sqlalchemy import engine_from_config
|
| 4 |
+
from sqlalchemy import pool
|
| 5 |
+
|
| 6 |
+
from alembic import context
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
from dotenv import load_dotenv
|
| 10 |
+
|
| 11 |
+
# Add the parent directory to sys.path
|
| 12 |
+
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
|
| 13 |
+
|
| 14 |
+
# Load environment variables from .env file
|
| 15 |
+
load_dotenv()
|
| 16 |
+
|
| 17 |
+
# Import the SQLAlchemy models
|
| 18 |
+
from app.db.models import Base
|
| 19 |
+
|
| 20 |
+
# this is the Alembic Config object, which provides
|
| 21 |
+
# access to the values within the .ini file in use.
|
| 22 |
+
config = context.config
|
| 23 |
+
|
| 24 |
+
# Override the SQLAlchemy URL with the one from environment variables
|
| 25 |
+
config.set_main_option("sqlalchemy.url", os.getenv("DATABASE_URL", "sqlite:///./costdb.db"))
|
| 26 |
+
|
| 27 |
+
# Interpret the config file for Python logging.
|
| 28 |
+
# This line sets up loggers basically.
|
| 29 |
+
if config.config_file_name is not None:
|
| 30 |
+
fileConfig(config.config_file_name)
|
| 31 |
+
|
| 32 |
+
# add your model's MetaData object here
|
| 33 |
+
# for 'autogenerate' support
|
| 34 |
+
target_metadata = Base.metadata
|
| 35 |
+
|
| 36 |
+
# other values from the config, defined by the needs of env.py,
|
| 37 |
+
# can be acquired:
|
| 38 |
+
# my_important_option = config.get_main_option("my_important_option")
|
| 39 |
+
# ... etc.
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def run_migrations_offline() -> None:
|
| 43 |
+
"""Run migrations in 'offline' mode.
|
| 44 |
+
|
| 45 |
+
This configures the context with just a URL
|
| 46 |
+
and not an Engine, though an Engine is acceptable
|
| 47 |
+
here as well. By skipping the Engine creation
|
| 48 |
+
we don't even need a DBAPI to be available.
|
| 49 |
+
|
| 50 |
+
Calls to context.execute() here emit the given string to the
|
| 51 |
+
script output.
|
| 52 |
+
|
| 53 |
+
"""
|
| 54 |
+
url = config.get_main_option("sqlalchemy.url")
|
| 55 |
+
context.configure(
|
| 56 |
+
url=url,
|
| 57 |
+
target_metadata=target_metadata,
|
| 58 |
+
literal_binds=True,
|
| 59 |
+
dialect_opts={"paramstyle": "named"},
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
with context.begin_transaction():
|
| 63 |
+
context.run_migrations()
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def run_migrations_online() -> None:
|
| 67 |
+
"""Run migrations in 'online' mode.
|
| 68 |
+
|
| 69 |
+
In this scenario we need to create an Engine
|
| 70 |
+
and associate a connection with the context.
|
| 71 |
+
|
| 72 |
+
"""
|
| 73 |
+
connectable = engine_from_config(
|
| 74 |
+
config.get_section(config.config_ini_section, {}),
|
| 75 |
+
prefix="sqlalchemy.",
|
| 76 |
+
poolclass=pool.NullPool,
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
with connectable.connect() as connection:
|
| 80 |
+
context.configure(
|
| 81 |
+
connection=connection, target_metadata=target_metadata
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
+
with context.begin_transaction():
|
| 85 |
+
context.run_migrations()
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
if context.is_offline_mode():
|
| 89 |
+
run_migrations_offline()
|
| 90 |
+
else:
|
| 91 |
+
run_migrations_online()
|
alembic/script.py.mako
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""${message}
|
| 2 |
+
|
| 3 |
+
Revision ID: ${up_revision}
|
| 4 |
+
Revises: ${down_revision | comma,n}
|
| 5 |
+
Create Date: ${create_date}
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from alembic import op
|
| 9 |
+
import sqlalchemy as sa
|
| 10 |
+
${imports if imports else ""}
|
| 11 |
+
|
| 12 |
+
# revision identifiers, used by Alembic.
|
| 13 |
+
revision = ${repr(up_revision)}
|
| 14 |
+
down_revision = ${repr(down_revision)}
|
| 15 |
+
branch_labels = ${repr(branch_labels)}
|
| 16 |
+
depends_on = ${repr(depends_on)}
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def upgrade() -> None:
|
| 20 |
+
${upgrades if upgrades else "pass"}
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def downgrade() -> None:
|
| 24 |
+
${downgrades if downgrades else "pass"}
|
app/__pycache__/agent.cpython-311.pyc
ADDED
|
Binary file (3.64 kB). View file
|
|
|
app/__pycache__/document_generator.cpython-311.pyc
ADDED
|
Binary file (39.1 kB). View file
|
|
|
app/__pycache__/document_processor.cpython-311.pyc
ADDED
|
Binary file (6.61 kB). View file
|
|
|
app/__pycache__/main.cpython-311.pyc
ADDED
|
Binary file (17.5 kB). View file
|
|
|
app/__pycache__/prompts.cpython-311.pyc
ADDED
|
Binary file (3.19 kB). View file
|
|
|
app/__pycache__/seed.cpython-311.pyc
ADDED
|
Binary file (3.49 kB). View file
|
|
|
app/agent.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain.agents import Tool, AgentExecutor, ZeroShotAgent, create_react_agent
|
| 2 |
+
from langchain.memory import ConversationBufferMemory
|
| 3 |
+
from langchain_community.llms import HuggingFacePipeline
|
| 4 |
+
from langchain.prompts import PromptTemplate
|
| 5 |
+
from langchain.chains import LLMChain
|
| 6 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
| 7 |
+
import torch
|
| 8 |
+
import os
|
| 9 |
+
from sqlalchemy.orm import Session
|
| 10 |
+
from app.tools.labor_cost import LaborCostTool
|
| 11 |
+
from app.tools.material_cost import MaterialCostTool
|
| 12 |
+
from app.tools.margin import MarginTool
|
| 13 |
+
from app.prompts import PREFIX, FORMAT_INSTRUCTIONS, SUFFIX
|
| 14 |
+
from dotenv import load_dotenv
|
| 15 |
+
|
| 16 |
+
# Load environment variables
|
| 17 |
+
load_dotenv()
|
| 18 |
+
|
| 19 |
+
# Get model name from environment variable or use default
|
| 20 |
+
MODEL_NAME = os.getenv("MODEL_NAME", "HuggingFaceH4/zephyr-7b-beta")
|
| 21 |
+
|
| 22 |
+
def create_llm():
|
| 23 |
+
"""Create a HuggingFacePipeline LLM"""
|
| 24 |
+
# Load model and tokenizer
|
| 25 |
+
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
|
| 26 |
+
model = AutoModelForCausalLM.from_pretrained(
|
| 27 |
+
MODEL_NAME,
|
| 28 |
+
torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
|
| 29 |
+
low_cpu_mem_usage=True,
|
| 30 |
+
device_map="auto" if torch.cuda.is_available() else None,
|
| 31 |
+
# Optimize for Mistral model
|
| 32 |
+
use_cache=True,
|
| 33 |
+
quantization_config=None if torch.cuda.is_available() else {"load_in_8bit": True}
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
# Create text generation pipeline optimized for Mistral
|
| 37 |
+
pipe = pipeline(
|
| 38 |
+
"text-generation",
|
| 39 |
+
model=model,
|
| 40 |
+
tokenizer=tokenizer,
|
| 41 |
+
max_new_tokens=512,
|
| 42 |
+
temperature=0.7,
|
| 43 |
+
top_p=0.95,
|
| 44 |
+
repetition_penalty=1.15,
|
| 45 |
+
do_sample=True,
|
| 46 |
+
pad_token_id=tokenizer.eos_token_id
|
| 47 |
+
)
|
| 48 |
+
|
| 49 |
+
# Create LangChain wrapper
|
| 50 |
+
llm = HuggingFacePipeline(pipeline=pipe)
|
| 51 |
+
|
| 52 |
+
return llm
|
| 53 |
+
|
| 54 |
+
def create_agent(db: Session):
|
| 55 |
+
"""Create a LangChain agent with tools"""
|
| 56 |
+
# Create tools
|
| 57 |
+
tools = [
|
| 58 |
+
LaborCostTool(db=db),
|
| 59 |
+
MaterialCostTool(db=db),
|
| 60 |
+
MarginTool(db=db)
|
| 61 |
+
]
|
| 62 |
+
|
| 63 |
+
# Create LLM
|
| 64 |
+
llm = create_llm()
|
| 65 |
+
|
| 66 |
+
# Create prompt template
|
| 67 |
+
prompt = ZeroShotAgent.create_prompt(
|
| 68 |
+
tools=tools,
|
| 69 |
+
prefix=PREFIX,
|
| 70 |
+
format_instructions=FORMAT_INSTRUCTIONS,
|
| 71 |
+
suffix=SUFFIX,
|
| 72 |
+
input_variables=["input", "chat_history", "agent_scratchpad"]
|
| 73 |
+
)
|
| 74 |
+
|
| 75 |
+
# Create memory
|
| 76 |
+
memory = ConversationBufferMemory(memory_key="chat_history")
|
| 77 |
+
|
| 78 |
+
# Create agent
|
| 79 |
+
agent_chain = create_react_agent(llm=llm, tools=tools, prompt=prompt)
|
| 80 |
+
|
| 81 |
+
# Create agent executor
|
| 82 |
+
agent_executor = AgentExecutor.from_agent_and_tools(
|
| 83 |
+
agent=agent_chain,
|
| 84 |
+
tools=tools,
|
| 85 |
+
memory=memory,
|
| 86 |
+
verbose=True,
|
| 87 |
+
handle_parsing_errors=True,
|
| 88 |
+
max_iterations=5
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
return agent_executor
|
app/db/__pycache__/database.cpython-311.pyc
ADDED
|
Binary file (1.26 kB). View file
|
|
|
app/db/__pycache__/models.cpython-311.pyc
ADDED
|
Binary file (6.29 kB). View file
|
|
|
app/db/database.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlalchemy import create_engine
|
| 2 |
+
from sqlalchemy.ext.declarative import declarative_base
|
| 3 |
+
from sqlalchemy.orm import sessionmaker
|
| 4 |
+
import os
|
| 5 |
+
from dotenv import load_dotenv
|
| 6 |
+
|
| 7 |
+
# Load environment variables
|
| 8 |
+
load_dotenv()
|
| 9 |
+
|
| 10 |
+
# Get database URL from environment variable or use SQLite as default
|
| 11 |
+
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./costdb.db")
|
| 12 |
+
|
| 13 |
+
# Create SQLAlchemy engine
|
| 14 |
+
engine = create_engine(
|
| 15 |
+
DATABASE_URL,
|
| 16 |
+
connect_args={"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# Create SessionLocal class
|
| 20 |
+
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
| 21 |
+
|
| 22 |
+
# Create Base class
|
| 23 |
+
Base = declarative_base()
|
| 24 |
+
|
| 25 |
+
# Dependency to get DB session
|
| 26 |
+
def get_db():
|
| 27 |
+
db = SessionLocal()
|
| 28 |
+
try:
|
| 29 |
+
yield db
|
| 30 |
+
finally:
|
| 31 |
+
db.close()
|
app/db/models.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, Text, LargeBinary
|
| 2 |
+
from sqlalchemy.orm import relationship
|
| 3 |
+
from sqlalchemy.sql import func
|
| 4 |
+
from .database import Base
|
| 5 |
+
|
| 6 |
+
class LaborRate(Base):
|
| 7 |
+
"""Model for labor rates by job type"""
|
| 8 |
+
__tablename__ = "labor_rates"
|
| 9 |
+
|
| 10 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 11 |
+
job_type = Column(String, unique=True, index=True, nullable=False)
|
| 12 |
+
hourly_rate = Column(Float, nullable=False)
|
| 13 |
+
currency = Column(String, default="TRY")
|
| 14 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 15 |
+
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
| 16 |
+
|
| 17 |
+
def __repr__(self):
|
| 18 |
+
return f"<LaborRate(job_type='{self.job_type}', hourly_rate={self.hourly_rate})>"
|
| 19 |
+
|
| 20 |
+
class Material(Base):
|
| 21 |
+
"""Model for materials and their unit prices"""
|
| 22 |
+
__tablename__ = "materials"
|
| 23 |
+
|
| 24 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 25 |
+
name = Column(String, index=True, nullable=False)
|
| 26 |
+
unit = Column(String, nullable=False) # e.g., kg, meter, piece
|
| 27 |
+
unit_price = Column(Float, nullable=False)
|
| 28 |
+
currency = Column(String, default="TRY")
|
| 29 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 30 |
+
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
| 31 |
+
|
| 32 |
+
def __repr__(self):
|
| 33 |
+
return f"<Material(name='{self.name}', unit='{self.unit}', unit_price={self.unit_price})>"
|
| 34 |
+
|
| 35 |
+
class MarginProfile(Base):
|
| 36 |
+
"""Model for margin profiles by project type"""
|
| 37 |
+
__tablename__ = "margin_profiles"
|
| 38 |
+
|
| 39 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 40 |
+
profile_name = Column(String, unique=True, index=True, nullable=False)
|
| 41 |
+
margin_percentage = Column(Float, nullable=False) # Stored as decimal (e.g., 0.15 for 15%)
|
| 42 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 43 |
+
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
| 44 |
+
|
| 45 |
+
def __repr__(self):
|
| 46 |
+
return f"<MarginProfile(profile_name='{self.profile_name}', margin_percentage={self.margin_percentage})>"
|
| 47 |
+
|
| 48 |
+
class ChatHistory(Base):
|
| 49 |
+
"""Model for storing chat history"""
|
| 50 |
+
__tablename__ = "chat_history"
|
| 51 |
+
|
| 52 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 53 |
+
user_input = Column(String, nullable=False)
|
| 54 |
+
assistant_response = Column(String, nullable=False)
|
| 55 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 56 |
+
|
| 57 |
+
def __repr__(self):
|
| 58 |
+
return f"<ChatHistory(id={self.id}, created_at={self.created_at})>"
|
| 59 |
+
|
| 60 |
+
class Document(Base):
|
| 61 |
+
"""Model for storing uploaded documents"""
|
| 62 |
+
__tablename__ = "documents"
|
| 63 |
+
|
| 64 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 65 |
+
filename = Column(String, nullable=False)
|
| 66 |
+
content_type = Column(String, nullable=False)
|
| 67 |
+
file_path = Column(String, nullable=False) # Path to stored file
|
| 68 |
+
file_size = Column(Integer, nullable=False) # Size in bytes
|
| 69 |
+
content_text = Column(Text, nullable=True) # Extracted text content
|
| 70 |
+
analysis_result = Column(Text, nullable=True) # Analysis results in JSON format
|
| 71 |
+
document_type = Column(String, nullable=True) # Type of document (uploaded, generated)
|
| 72 |
+
template_type = Column(String, nullable=True) # Template type for generated documents
|
| 73 |
+
uploaded_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 74 |
+
analyzed_at = Column(DateTime(timezone=True), nullable=True)
|
| 75 |
+
|
| 76 |
+
def __repr__(self):
|
| 77 |
+
return f"<Document(id={self.id}, filename='{self.filename}', uploaded_at={self.uploaded_at})>"
|
app/document_generator.py
ADDED
|
@@ -0,0 +1,985 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from typing import Dict, List, Optional, Union
|
| 5 |
+
|
| 6 |
+
import docx
|
| 7 |
+
from docx.shared import Pt, Cm
|
| 8 |
+
from docx.enum.text import WD_ALIGN_PARAGRAPH
|
| 9 |
+
from docx.enum.style import WD_STYLE_TYPE
|
| 10 |
+
|
| 11 |
+
import openpyxl
|
| 12 |
+
from openpyxl.styles import Font, Alignment, PatternFill, Border, Side
|
| 13 |
+
from openpyxl.utils import get_column_letter
|
| 14 |
+
|
| 15 |
+
from sqlalchemy.orm import Session
|
| 16 |
+
from app.db.models import Document
|
| 17 |
+
|
| 18 |
+
# Doküman çıktı dizini
|
| 19 |
+
OUTPUT_DIR = os.path.join(os.getcwd(), 'uploads')
|
| 20 |
+
os.makedirs(OUTPUT_DIR, exist_ok=True)
|
| 21 |
+
|
| 22 |
+
def create_word_document(data: Dict, template_type: str = 'maliyet_raporu') -> str:
|
| 23 |
+
"""
|
| 24 |
+
Verilen verilerle Word dokümanı oluşturur
|
| 25 |
+
|
| 26 |
+
Args:
|
| 27 |
+
data: Doküman içeriği için veriler
|
| 28 |
+
template_type: Şablon türü (maliyet_raporu, teklif, vs.)
|
| 29 |
+
|
| 30 |
+
Returns:
|
| 31 |
+
Oluşturulan dosyanın yolu
|
| 32 |
+
"""
|
| 33 |
+
doc = docx.Document()
|
| 34 |
+
|
| 35 |
+
# Stil oluştur
|
| 36 |
+
styles = doc.styles
|
| 37 |
+
|
| 38 |
+
# Başlık stili
|
| 39 |
+
if 'Başlık' not in styles:
|
| 40 |
+
style = styles.add_style('Başlık', WD_STYLE_TYPE.PARAGRAPH)
|
| 41 |
+
font = style.font
|
| 42 |
+
font.name = 'Arial'
|
| 43 |
+
font.size = Pt(16)
|
| 44 |
+
font.bold = True
|
| 45 |
+
|
| 46 |
+
# Alt başlık stili
|
| 47 |
+
if 'Alt Başlık' not in styles:
|
| 48 |
+
style = styles.add_style('Alt Başlık', WD_STYLE_TYPE.PARAGRAPH)
|
| 49 |
+
font = style.font
|
| 50 |
+
font.name = 'Arial'
|
| 51 |
+
font.size = Pt(14)
|
| 52 |
+
font.bold = True
|
| 53 |
+
|
| 54 |
+
# Normal metin stili
|
| 55 |
+
if 'Normal Metin' not in styles:
|
| 56 |
+
style = styles.add_style('Normal Metin', WD_STYLE_TYPE.PARAGRAPH)
|
| 57 |
+
font = style.font
|
| 58 |
+
font.name = 'Arial'
|
| 59 |
+
font.size = Pt(11)
|
| 60 |
+
|
| 61 |
+
# Tablo başlık stili
|
| 62 |
+
if 'Tablo Başlık' not in styles:
|
| 63 |
+
style = styles.add_style('Tablo Başlık', WD_STYLE_TYPE.PARAGRAPH)
|
| 64 |
+
font = style.font
|
| 65 |
+
font.name = 'Arial'
|
| 66 |
+
font.size = Pt(11)
|
| 67 |
+
font.bold = True
|
| 68 |
+
|
| 69 |
+
# Şablon türüne göre doküman oluştur
|
| 70 |
+
if template_type == 'maliyet_raporu':
|
| 71 |
+
_create_cost_report_word(doc, data)
|
| 72 |
+
elif template_type == 'teklif':
|
| 73 |
+
_create_offer_word(doc, data)
|
| 74 |
+
else:
|
| 75 |
+
# Varsayılan şablon
|
| 76 |
+
_create_default_word(doc, data)
|
| 77 |
+
|
| 78 |
+
# Dosyayı kaydet
|
| 79 |
+
timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
|
| 80 |
+
filename = f"{timestamp}_{template_type}.docx"
|
| 81 |
+
file_path = os.path.join(OUTPUT_DIR, filename)
|
| 82 |
+
doc.save(file_path)
|
| 83 |
+
|
| 84 |
+
return file_path
|
| 85 |
+
|
| 86 |
+
def _create_cost_report_word(doc, data):
|
| 87 |
+
"""
|
| 88 |
+
Maliyet raporu Word dokümanı oluşturur
|
| 89 |
+
"""
|
| 90 |
+
# Başlık
|
| 91 |
+
heading = doc.add_paragraph("MALİYET RAPORU", style='Başlık')
|
| 92 |
+
heading.alignment = WD_ALIGN_PARAGRAPH.CENTER
|
| 93 |
+
|
| 94 |
+
# Tarih
|
| 95 |
+
date_paragraph = doc.add_paragraph(f"Tarih: {datetime.now().strftime('%d.%m.%Y')}", style='Normal Metin')
|
| 96 |
+
date_paragraph.alignment = WD_ALIGN_PARAGRAPH.RIGHT
|
| 97 |
+
|
| 98 |
+
doc.add_paragraph()
|
| 99 |
+
|
| 100 |
+
# Proje bilgileri
|
| 101 |
+
if 'proje_bilgileri' in data:
|
| 102 |
+
doc.add_paragraph("PROJE BİLGİLERİ", style='Alt Başlık')
|
| 103 |
+
for key, value in data['proje_bilgileri'].items():
|
| 104 |
+
doc.add_paragraph(f"{key}: {value}", style='Normal Metin')
|
| 105 |
+
doc.add_paragraph()
|
| 106 |
+
|
| 107 |
+
# İşçilik maliyetleri
|
| 108 |
+
if 'iscilik_maliyetleri' in data:
|
| 109 |
+
doc.add_paragraph("İŞÇİLİK MALİYETLERİ", style='Alt Başlık')
|
| 110 |
+
|
| 111 |
+
# Tablo oluştur
|
| 112 |
+
table = doc.add_table(rows=1, cols=4)
|
| 113 |
+
table.style = 'Table Grid'
|
| 114 |
+
|
| 115 |
+
# Tablo başlıkları
|
| 116 |
+
header_cells = table.rows[0].cells
|
| 117 |
+
header_cells[0].text = "Pozisyon"
|
| 118 |
+
header_cells[1].text = "Saat Ücreti (TL)"
|
| 119 |
+
header_cells[2].text = "Süre (Saat)"
|
| 120 |
+
header_cells[3].text = "Toplam (TL)"
|
| 121 |
+
|
| 122 |
+
# Başlık hücrelerini kalın yap
|
| 123 |
+
for cell in header_cells:
|
| 124 |
+
for paragraph in cell.paragraphs:
|
| 125 |
+
for run in paragraph.runs:
|
| 126 |
+
run.font.bold = True
|
| 127 |
+
|
| 128 |
+
# Verileri tabloya ekle
|
| 129 |
+
for item in data['iscilik_maliyetleri']:
|
| 130 |
+
row_cells = table.add_row().cells
|
| 131 |
+
row_cells[0].text = item.get('pozisyon', '')
|
| 132 |
+
row_cells[1].text = str(item.get('saat_ucreti', 0))
|
| 133 |
+
row_cells[2].text = str(item.get('sure', 0))
|
| 134 |
+
row_cells[3].text = str(item.get('toplam', 0))
|
| 135 |
+
|
| 136 |
+
# Toplam satırı
|
| 137 |
+
if 'iscilik_toplam' in data:
|
| 138 |
+
row_cells = table.add_row().cells
|
| 139 |
+
row_cells[0].text = "TOPLAM"
|
| 140 |
+
row_cells[0].merge(row_cells[2])
|
| 141 |
+
row_cells[3].text = str(data['iscilik_toplam'])
|
| 142 |
+
|
| 143 |
+
# Toplam satırını kalın yap
|
| 144 |
+
for cell in [row_cells[0], row_cells[3]]:
|
| 145 |
+
for paragraph in cell.paragraphs:
|
| 146 |
+
for run in paragraph.runs:
|
| 147 |
+
run.font.bold = True
|
| 148 |
+
|
| 149 |
+
doc.add_paragraph()
|
| 150 |
+
|
| 151 |
+
# Malzeme maliyetleri
|
| 152 |
+
if 'malzeme_maliyetleri' in data:
|
| 153 |
+
doc.add_paragraph("MALZEME MALİYETLERİ", style='Alt Başlık')
|
| 154 |
+
|
| 155 |
+
# Tablo oluştur
|
| 156 |
+
table = doc.add_table(rows=1, cols=5)
|
| 157 |
+
table.style = 'Table Grid'
|
| 158 |
+
|
| 159 |
+
# Tablo başlıkları
|
| 160 |
+
header_cells = table.rows[0].cells
|
| 161 |
+
header_cells[0].text = "Malzeme"
|
| 162 |
+
header_cells[1].text = "Birim Fiyat (TL)"
|
| 163 |
+
header_cells[2].text = "Miktar"
|
| 164 |
+
header_cells[3].text = "Birim"
|
| 165 |
+
header_cells[4].text = "Toplam (TL)"
|
| 166 |
+
|
| 167 |
+
# Başlık hücrelerini kalın yap
|
| 168 |
+
for cell in header_cells:
|
| 169 |
+
for paragraph in cell.paragraphs:
|
| 170 |
+
for run in paragraph.runs:
|
| 171 |
+
run.font.bold = True
|
| 172 |
+
|
| 173 |
+
# Verileri tabloya ekle
|
| 174 |
+
for item in data['malzeme_maliyetleri']:
|
| 175 |
+
row_cells = table.add_row().cells
|
| 176 |
+
row_cells[0].text = item.get('malzeme', '')
|
| 177 |
+
row_cells[1].text = str(item.get('birim_fiyat', 0))
|
| 178 |
+
row_cells[2].text = str(item.get('miktar', 0))
|
| 179 |
+
row_cells[3].text = item.get('birim', '')
|
| 180 |
+
row_cells[4].text = str(item.get('toplam', 0))
|
| 181 |
+
|
| 182 |
+
# Toplam satırı
|
| 183 |
+
if 'malzeme_toplam' in data:
|
| 184 |
+
row_cells = table.add_row().cells
|
| 185 |
+
row_cells[0].text = "TOPLAM"
|
| 186 |
+
row_cells[0].merge(row_cells[3])
|
| 187 |
+
row_cells[4].text = str(data['malzeme_toplam'])
|
| 188 |
+
|
| 189 |
+
# Toplam satırını kalın yap
|
| 190 |
+
for cell in [row_cells[0], row_cells[4]]:
|
| 191 |
+
for paragraph in cell.paragraphs:
|
| 192 |
+
for run in paragraph.runs:
|
| 193 |
+
run.font.bold = True
|
| 194 |
+
|
| 195 |
+
doc.add_paragraph()
|
| 196 |
+
|
| 197 |
+
# Toplam maliyet
|
| 198 |
+
if 'toplam_maliyet' in data:
|
| 199 |
+
doc.add_paragraph("TOPLAM MALİYET", style='Alt Başlık')
|
| 200 |
+
|
| 201 |
+
# Tablo oluştur
|
| 202 |
+
table = doc.add_table(rows=1, cols=2)
|
| 203 |
+
table.style = 'Table Grid'
|
| 204 |
+
|
| 205 |
+
# Tablo başlıkları
|
| 206 |
+
header_cells = table.rows[0].cells
|
| 207 |
+
header_cells[0].text = "Maliyet Kalemi"
|
| 208 |
+
header_cells[1].text = "Tutar (TL)"
|
| 209 |
+
|
| 210 |
+
# Başlık hücrelerini kalın yap
|
| 211 |
+
for cell in header_cells:
|
| 212 |
+
for paragraph in cell.paragraphs:
|
| 213 |
+
for run in paragraph.runs:
|
| 214 |
+
run.font.bold = True
|
| 215 |
+
|
| 216 |
+
# İşçilik toplamı
|
| 217 |
+
if 'iscilik_toplam' in data:
|
| 218 |
+
row_cells = table.add_row().cells
|
| 219 |
+
row_cells[0].text = "İşçilik Maliyeti"
|
| 220 |
+
row_cells[1].text = str(data['iscilik_toplam'])
|
| 221 |
+
|
| 222 |
+
# Malzeme toplamı
|
| 223 |
+
if 'malzeme_toplam' in data:
|
| 224 |
+
row_cells = table.add_row().cells
|
| 225 |
+
row_cells[0].text = "Malzeme Maliyeti"
|
| 226 |
+
row_cells[1].text = str(data['malzeme_toplam'])
|
| 227 |
+
|
| 228 |
+
# Diğer maliyetler
|
| 229 |
+
if 'diger_maliyetler' in data:
|
| 230 |
+
for key, value in data['diger_maliyetler'].items():
|
| 231 |
+
row_cells = table.add_row().cells
|
| 232 |
+
row_cells[0].text = key
|
| 233 |
+
row_cells[1].text = str(value)
|
| 234 |
+
|
| 235 |
+
# Toplam maliyet
|
| 236 |
+
row_cells = table.add_row().cells
|
| 237 |
+
row_cells[0].text = "TOPLAM MALİYET"
|
| 238 |
+
row_cells[1].text = str(data['toplam_maliyet'])
|
| 239 |
+
|
| 240 |
+
# Toplam satırını kalın yap
|
| 241 |
+
for cell in row_cells:
|
| 242 |
+
for paragraph in cell.paragraphs:
|
| 243 |
+
for run in paragraph.runs:
|
| 244 |
+
run.font.bold = True
|
| 245 |
+
|
| 246 |
+
doc.add_paragraph()
|
| 247 |
+
|
| 248 |
+
# Kar marjı ve teklif fiyatı
|
| 249 |
+
if 'kar_marji' in data and 'teklif_fiyati' in data:
|
| 250 |
+
doc.add_paragraph("TEKLİF BİLGİLERİ", style='Alt Başlık')
|
| 251 |
+
|
| 252 |
+
# Tablo oluştur
|
| 253 |
+
table = doc.add_table(rows=1, cols=2)
|
| 254 |
+
table.style = 'Table Grid'
|
| 255 |
+
|
| 256 |
+
# Tablo başlıkları
|
| 257 |
+
header_cells = table.rows[0].cells
|
| 258 |
+
header_cells[0].text = "Kalem"
|
| 259 |
+
header_cells[1].text = "Değer"
|
| 260 |
+
|
| 261 |
+
# Başlık hücrelerini kalın yap
|
| 262 |
+
for cell in header_cells:
|
| 263 |
+
for paragraph in cell.paragraphs:
|
| 264 |
+
for run in paragraph.runs:
|
| 265 |
+
run.font.bold = True
|
| 266 |
+
|
| 267 |
+
# Toplam maliyet
|
| 268 |
+
row_cells = table.add_row().cells
|
| 269 |
+
row_cells[0].text = "Toplam Maliyet (TL)"
|
| 270 |
+
row_cells[1].text = str(data['toplam_maliyet'])
|
| 271 |
+
|
| 272 |
+
# Kar marjı
|
| 273 |
+
row_cells = table.add_row().cells
|
| 274 |
+
row_cells[0].text = "Kar Marjı (%)"
|
| 275 |
+
row_cells[1].text = str(data['kar_marji'])
|
| 276 |
+
|
| 277 |
+
# Kar tutarı
|
| 278 |
+
if 'kar_tutari' in data:
|
| 279 |
+
row_cells = table.add_row().cells
|
| 280 |
+
row_cells[0].text = "Kar Tutarı (TL)"
|
| 281 |
+
row_cells[1].text = str(data['kar_tutari'])
|
| 282 |
+
|
| 283 |
+
# Teklif fiyatı
|
| 284 |
+
row_cells = table.add_row().cells
|
| 285 |
+
row_cells[0].text = "TEKLİF FİYATI (TL)"
|
| 286 |
+
row_cells[1].text = str(data['teklif_fiyati'])
|
| 287 |
+
|
| 288 |
+
# Teklif fiyatı satırını kalın yap
|
| 289 |
+
for cell in row_cells:
|
| 290 |
+
for paragraph in cell.paragraphs:
|
| 291 |
+
for run in paragraph.runs:
|
| 292 |
+
run.font.bold = True
|
| 293 |
+
|
| 294 |
+
def _create_offer_word(doc, data):
|
| 295 |
+
"""
|
| 296 |
+
Teklif Word dokümanı oluşturur
|
| 297 |
+
"""
|
| 298 |
+
# Başlık
|
| 299 |
+
heading = doc.add_paragraph("TEKLİF", style='Başlık')
|
| 300 |
+
heading.alignment = WD_ALIGN_PARAGRAPH.CENTER
|
| 301 |
+
|
| 302 |
+
# Tarih ve referans
|
| 303 |
+
date_paragraph = doc.add_paragraph(f"Tarih: {datetime.now().strftime('%d.%m.%Y')}", style='Normal Metin')
|
| 304 |
+
date_paragraph.alignment = WD_ALIGN_PARAGRAPH.RIGHT
|
| 305 |
+
|
| 306 |
+
if 'referans_no' in data:
|
| 307 |
+
ref_paragraph = doc.add_paragraph(f"Referans No: {data['referans_no']}", style='Normal Metin')
|
| 308 |
+
ref_paragraph.alignment = WD_ALIGN_PARAGRAPH.RIGHT
|
| 309 |
+
|
| 310 |
+
doc.add_paragraph()
|
| 311 |
+
|
| 312 |
+
# Müşteri bilgileri
|
| 313 |
+
if 'musteri_bilgileri' in data:
|
| 314 |
+
doc.add_paragraph("MÜŞTERİ BİLGİLERİ", style='Alt Başlık')
|
| 315 |
+
for key, value in data['musteri_bilgileri'].items():
|
| 316 |
+
doc.add_paragraph(f"{key}: {value}", style='Normal Metin')
|
| 317 |
+
doc.add_paragraph()
|
| 318 |
+
|
| 319 |
+
# Proje bilgileri
|
| 320 |
+
if 'proje_bilgileri' in data:
|
| 321 |
+
doc.add_paragraph("PROJE BİLGİLERİ", style='Alt Başlık')
|
| 322 |
+
for key, value in data['proje_bilgileri'].items():
|
| 323 |
+
doc.add_paragraph(f"{key}: {value}", style='Normal Metin')
|
| 324 |
+
doc.add_paragraph()
|
| 325 |
+
|
| 326 |
+
# Teklif detayları
|
| 327 |
+
if 'teklif_detaylari' in data:
|
| 328 |
+
doc.add_paragraph("TEKLİF DETAYLARI", style='Alt Başlık')
|
| 329 |
+
|
| 330 |
+
# Tablo oluştur
|
| 331 |
+
table = doc.add_table(rows=1, cols=5)
|
| 332 |
+
table.style = 'Table Grid'
|
| 333 |
+
|
| 334 |
+
# Tablo başlıkları
|
| 335 |
+
header_cells = table.rows[0].cells
|
| 336 |
+
header_cells[0].text = "Kalem"
|
| 337 |
+
header_cells[1].text = "Açıklama"
|
| 338 |
+
header_cells[2].text = "Miktar"
|
| 339 |
+
header_cells[3].text = "Birim Fiyat (TL)"
|
| 340 |
+
header_cells[4].text = "Toplam (TL)"
|
| 341 |
+
|
| 342 |
+
# Başlık hücrelerini kalın yap
|
| 343 |
+
for cell in header_cells:
|
| 344 |
+
for paragraph in cell.paragraphs:
|
| 345 |
+
for run in paragraph.runs:
|
| 346 |
+
run.font.bold = True
|
| 347 |
+
|
| 348 |
+
# Verileri tabloya ekle
|
| 349 |
+
for item in data['teklif_detaylari']:
|
| 350 |
+
row_cells = table.add_row().cells
|
| 351 |
+
row_cells[0].text = item.get('kalem', '')
|
| 352 |
+
row_cells[1].text = item.get('aciklama', '')
|
| 353 |
+
row_cells[2].text = str(item.get('miktar', 0))
|
| 354 |
+
row_cells[3].text = str(item.get('birim_fiyat', 0))
|
| 355 |
+
row_cells[4].text = str(item.get('toplam', 0))
|
| 356 |
+
|
| 357 |
+
# Toplam satırı
|
| 358 |
+
if 'toplam_fiyat' in data:
|
| 359 |
+
row_cells = table.add_row().cells
|
| 360 |
+
row_cells[0].text = "TOPLAM"
|
| 361 |
+
row_cells[0].merge(row_cells[3])
|
| 362 |
+
row_cells[4].text = str(data['toplam_fiyat'])
|
| 363 |
+
|
| 364 |
+
# Toplam satırını kalın yap
|
| 365 |
+
for cell in [row_cells[0], row_cells[4]]:
|
| 366 |
+
for paragraph in cell.paragraphs:
|
| 367 |
+
for run in paragraph.runs:
|
| 368 |
+
run.font.bold = True
|
| 369 |
+
|
| 370 |
+
doc.add_paragraph()
|
| 371 |
+
|
| 372 |
+
# Ödeme ve teslimat koşulları
|
| 373 |
+
if 'odeme_kosullari' in data or 'teslimat_kosullari' in data:
|
| 374 |
+
doc.add_paragraph("ÖDEME VE TESLİMAT KOŞULLARI", style='Alt Başlık')
|
| 375 |
+
|
| 376 |
+
if 'odeme_kosullari' in data:
|
| 377 |
+
doc.add_paragraph("Ödeme Koşulları:", style='Tablo Başlık')
|
| 378 |
+
doc.add_paragraph(data['odeme_kosullari'], style='Normal Metin')
|
| 379 |
+
|
| 380 |
+
if 'teslimat_kosullari' in data:
|
| 381 |
+
doc.add_paragraph("Teslimat Koşulları:", style='Tablo Başlık')
|
| 382 |
+
doc.add_paragraph(data['teslimat_kosullari'], style='Normal Metin')
|
| 383 |
+
|
| 384 |
+
doc.add_paragraph()
|
| 385 |
+
|
| 386 |
+
# Geçerlilik süresi
|
| 387 |
+
if 'gecerlilik_suresi' in data:
|
| 388 |
+
doc.add_paragraph("GEÇERLİLİK SÜRESİ", style='Alt Başlık')
|
| 389 |
+
doc.add_paragraph(data['gecerlilik_suresi'], style='Normal Metin')
|
| 390 |
+
doc.add_paragraph()
|
| 391 |
+
|
| 392 |
+
# İmza
|
| 393 |
+
doc.add_paragraph("Saygılarımızla,", style='Normal Metin')
|
| 394 |
+
doc.add_paragraph()
|
| 395 |
+
doc.add_paragraph()
|
| 396 |
+
doc.add_paragraph("[İmza]", style='Normal Metin')
|
| 397 |
+
|
| 398 |
+
if 'firma_bilgileri' in data:
|
| 399 |
+
for key, value in data['firma_bilgileri'].items():
|
| 400 |
+
doc.add_paragraph(f"{value}", style='Normal Metin')
|
| 401 |
+
|
| 402 |
+
def _create_default_word(doc, data):
|
| 403 |
+
"""
|
| 404 |
+
Varsayılan Word dokümanı oluşturur
|
| 405 |
+
"""
|
| 406 |
+
# Başlık
|
| 407 |
+
heading = doc.add_paragraph(data.get('baslik', 'DOKÜMAN'), style='Başlık')
|
| 408 |
+
heading.alignment = WD_ALIGN_PARAGRAPH.CENTER
|
| 409 |
+
|
| 410 |
+
# Tarih
|
| 411 |
+
date_paragraph = doc.add_paragraph(f"Tarih: {datetime.now().strftime('%d.%m.%Y')}", style='Normal Metin')
|
| 412 |
+
date_paragraph.alignment = WD_ALIGN_PARAGRAPH.RIGHT
|
| 413 |
+
|
| 414 |
+
doc.add_paragraph()
|
| 415 |
+
|
| 416 |
+
# İçerik
|
| 417 |
+
if 'icerik' in data:
|
| 418 |
+
for item in data['icerik']:
|
| 419 |
+
if item.get('tip') == 'baslik':
|
| 420 |
+
doc.add_paragraph(item.get('metin', ''), style='Alt Başlık')
|
| 421 |
+
elif item.get('tip') == 'paragraf':
|
| 422 |
+
doc.add_paragraph(item.get('metin', ''), style='Normal Metin')
|
| 423 |
+
elif item.get('tip') == 'liste':
|
| 424 |
+
for liste_item in item.get('liste_ogeler', []):
|
| 425 |
+
paragraph = doc.add_paragraph(style='List Bullet')
|
| 426 |
+
paragraph.add_run(liste_item)
|
| 427 |
+
|
| 428 |
+
def create_excel_document(data: Dict, template_type: str = 'maliyet_raporu') -> str:
|
| 429 |
+
"""
|
| 430 |
+
Verilen verilerle Excel dokümanı oluşturur
|
| 431 |
+
|
| 432 |
+
Args:
|
| 433 |
+
data: Doküman içeriği için veriler
|
| 434 |
+
template_type: Şablon türü (maliyet_raporu, teklif, vs.)
|
| 435 |
+
|
| 436 |
+
Returns:
|
| 437 |
+
Oluşturulan dosyanın yolu
|
| 438 |
+
"""
|
| 439 |
+
wb = openpyxl.Workbook()
|
| 440 |
+
|
| 441 |
+
# Şablon türüne göre doküman oluştur
|
| 442 |
+
if template_type == 'maliyet_raporu':
|
| 443 |
+
_create_cost_report_excel(wb, data)
|
| 444 |
+
elif template_type == 'teklif':
|
| 445 |
+
_create_offer_excel(wb, data)
|
| 446 |
+
else:
|
| 447 |
+
# Varsayılan şablon
|
| 448 |
+
_create_default_excel(wb, data)
|
| 449 |
+
|
| 450 |
+
# Dosyayı kaydet
|
| 451 |
+
timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
|
| 452 |
+
filename = f"{timestamp}_{template_type}.xlsx"
|
| 453 |
+
file_path = os.path.join(OUTPUT_DIR, filename)
|
| 454 |
+
wb.save(file_path)
|
| 455 |
+
|
| 456 |
+
return file_path
|
| 457 |
+
|
| 458 |
+
def _create_cost_report_excel(wb, data):
|
| 459 |
+
"""
|
| 460 |
+
Maliyet raporu Excel dokümanı oluşturur
|
| 461 |
+
"""
|
| 462 |
+
# Varsayılan sayfayı sil
|
| 463 |
+
if 'Sheet' in wb.sheetnames:
|
| 464 |
+
del wb['Sheet']
|
| 465 |
+
|
| 466 |
+
# Sayfalar oluştur
|
| 467 |
+
ws_summary = wb.create_sheet("Özet")
|
| 468 |
+
ws_labor = wb.create_sheet("İşçilik Maliyetleri")
|
| 469 |
+
ws_material = wb.create_sheet("Malzeme Maliyetleri")
|
| 470 |
+
|
| 471 |
+
# Stil tanımlamaları
|
| 472 |
+
header_font = Font(name='Arial', size=12, bold=True)
|
| 473 |
+
normal_font = Font(name='Arial', size=11)
|
| 474 |
+
total_font = Font(name='Arial', size=11, bold=True)
|
| 475 |
+
|
| 476 |
+
header_fill = PatternFill(start_color="DDDDDD", end_color="DDDDDD", fill_type="solid")
|
| 477 |
+
total_fill = PatternFill(start_color="EEEEEE", end_color="EEEEEE", fill_type="solid")
|
| 478 |
+
|
| 479 |
+
center_align = Alignment(horizontal='center', vertical='center')
|
| 480 |
+
right_align = Alignment(horizontal='right', vertical='center')
|
| 481 |
+
|
| 482 |
+
thin_border = Border(
|
| 483 |
+
left=Side(style='thin'),
|
| 484 |
+
right=Side(style='thin'),
|
| 485 |
+
top=Side(style='thin'),
|
| 486 |
+
bottom=Side(style='thin')
|
| 487 |
+
)
|
| 488 |
+
|
| 489 |
+
# Özet sayfası
|
| 490 |
+
ws_summary.column_dimensions['A'].width = 30
|
| 491 |
+
ws_summary.column_dimensions['B'].width = 15
|
| 492 |
+
|
| 493 |
+
# Başlık
|
| 494 |
+
ws_summary.merge_cells('A1:B1')
|
| 495 |
+
ws_summary['A1'] = "MALİYET RAPORU"
|
| 496 |
+
ws_summary['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 497 |
+
ws_summary['A1'].alignment = center_align
|
| 498 |
+
|
| 499 |
+
# Tarih
|
| 500 |
+
ws_summary['A2'] = "Tarih:"
|
| 501 |
+
ws_summary['B2'] = datetime.now().strftime('%d.%m.%Y')
|
| 502 |
+
ws_summary['A2'].font = normal_font
|
| 503 |
+
ws_summary['B2'].font = normal_font
|
| 504 |
+
|
| 505 |
+
# Proje bilgileri
|
| 506 |
+
row = 4
|
| 507 |
+
if 'proje_bilgileri' in data:
|
| 508 |
+
ws_summary['A3'] = "PROJE BİLGİLERİ"
|
| 509 |
+
ws_summary['A3'].font = header_font
|
| 510 |
+
|
| 511 |
+
for key, value in data['proje_bilgileri'].items():
|
| 512 |
+
ws_summary[f'A{row}'] = key
|
| 513 |
+
ws_summary[f'B{row}'] = value
|
| 514 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 515 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 516 |
+
row += 1
|
| 517 |
+
|
| 518 |
+
row += 1
|
| 519 |
+
|
| 520 |
+
# Toplam maliyet tablosu
|
| 521 |
+
ws_summary[f'A{row}'] = "TOPLAM MALİYET"
|
| 522 |
+
ws_summary[f'A{row}'].font = header_font
|
| 523 |
+
row += 1
|
| 524 |
+
|
| 525 |
+
# Tablo başlıkları
|
| 526 |
+
ws_summary[f'A{row}'] = "Maliyet Kalemi"
|
| 527 |
+
ws_summary[f'B{row}'] = "Tutar (TL)"
|
| 528 |
+
ws_summary[f'A{row}'].font = header_font
|
| 529 |
+
ws_summary[f'B{row}'].font = header_font
|
| 530 |
+
ws_summary[f'A{row}'].fill = header_fill
|
| 531 |
+
ws_summary[f'B{row}'].fill = header_fill
|
| 532 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 533 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 534 |
+
row += 1
|
| 535 |
+
|
| 536 |
+
# İşçilik toplamı
|
| 537 |
+
if 'iscilik_toplam' in data:
|
| 538 |
+
ws_summary[f'A{row}'] = "İşçilik Maliyeti"
|
| 539 |
+
ws_summary[f'B{row}'] = data['iscilik_toplam']
|
| 540 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 541 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 542 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 543 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 544 |
+
row += 1
|
| 545 |
+
|
| 546 |
+
# Malzeme toplamı
|
| 547 |
+
if 'malzeme_toplam' in data:
|
| 548 |
+
ws_summary[f'A{row}'] = "Malzeme Maliyeti"
|
| 549 |
+
ws_summary[f'B{row}'] = data['malzeme_toplam']
|
| 550 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 551 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 552 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 553 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 554 |
+
row += 1
|
| 555 |
+
|
| 556 |
+
# Diğer maliyetler
|
| 557 |
+
if 'diger_maliyetler' in data:
|
| 558 |
+
for key, value in data['diger_maliyetler'].items():
|
| 559 |
+
ws_summary[f'A{row}'] = key
|
| 560 |
+
ws_summary[f'B{row}'] = value
|
| 561 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 562 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 563 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 564 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 565 |
+
row += 1
|
| 566 |
+
|
| 567 |
+
# Toplam maliyet
|
| 568 |
+
if 'toplam_maliyet' in data:
|
| 569 |
+
ws_summary[f'A{row}'] = "TOPLAM MALİYET"
|
| 570 |
+
ws_summary[f'B{row}'] = data['toplam_maliyet']
|
| 571 |
+
ws_summary[f'A{row}'].font = total_font
|
| 572 |
+
ws_summary[f'B{row}'].font = total_font
|
| 573 |
+
ws_summary[f'A{row}'].fill = total_fill
|
| 574 |
+
ws_summary[f'B{row}'].fill = total_fill
|
| 575 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 576 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 577 |
+
row += 2
|
| 578 |
+
|
| 579 |
+
# Kar marjı ve teklif fiyatı
|
| 580 |
+
if 'kar_marji' in data and 'teklif_fiyati' in data:
|
| 581 |
+
ws_summary[f'A{row}'] = "TEKLİF BİLGİLERİ"
|
| 582 |
+
ws_summary[f'A{row}'].font = header_font
|
| 583 |
+
row += 1
|
| 584 |
+
|
| 585 |
+
# Tablo başlıkları
|
| 586 |
+
ws_summary[f'A{row}'] = "Kalem"
|
| 587 |
+
ws_summary[f'B{row}'] = "Değer"
|
| 588 |
+
ws_summary[f'A{row}'].font = header_font
|
| 589 |
+
ws_summary[f'B{row}'].font = header_font
|
| 590 |
+
ws_summary[f'A{row}'].fill = header_fill
|
| 591 |
+
ws_summary[f'B{row}'].fill = header_fill
|
| 592 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 593 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 594 |
+
row += 1
|
| 595 |
+
|
| 596 |
+
# Toplam maliyet
|
| 597 |
+
ws_summary[f'A{row}'] = "Toplam Maliyet (TL)"
|
| 598 |
+
ws_summary[f'B{row}'] = data['toplam_maliyet']
|
| 599 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 600 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 601 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 602 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 603 |
+
row += 1
|
| 604 |
+
|
| 605 |
+
# Kar marjı
|
| 606 |
+
ws_summary[f'A{row}'] = "Kar Marjı (%)"
|
| 607 |
+
ws_summary[f'B{row}'] = data['kar_marji']
|
| 608 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 609 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 610 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 611 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 612 |
+
row += 1
|
| 613 |
+
|
| 614 |
+
# Kar tutarı
|
| 615 |
+
if 'kar_tutari' in data:
|
| 616 |
+
ws_summary[f'A{row}'] = "Kar Tutarı (TL)"
|
| 617 |
+
ws_summary[f'B{row}'] = data['kar_tutari']
|
| 618 |
+
ws_summary[f'A{row}'].font = normal_font
|
| 619 |
+
ws_summary[f'B{row}'].font = normal_font
|
| 620 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 621 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 622 |
+
row += 1
|
| 623 |
+
|
| 624 |
+
# Teklif fiyatı
|
| 625 |
+
ws_summary[f'A{row}'] = "TEKLİF FİYATI (TL)"
|
| 626 |
+
ws_summary[f'B{row}'] = data['teklif_fiyati']
|
| 627 |
+
ws_summary[f'A{row}'].font = total_font
|
| 628 |
+
ws_summary[f'B{row}'].font = total_font
|
| 629 |
+
ws_summary[f'A{row}'].fill = total_fill
|
| 630 |
+
ws_summary[f'B{row}'].fill = total_fill
|
| 631 |
+
ws_summary[f'A{row}'].border = thin_border
|
| 632 |
+
ws_summary[f'B{row}'].border = thin_border
|
| 633 |
+
|
| 634 |
+
# İşçilik maliyetleri sayfası
|
| 635 |
+
if 'iscilik_maliyetleri' in data:
|
| 636 |
+
ws_labor.column_dimensions['A'].width = 30
|
| 637 |
+
ws_labor.column_dimensions['B'].width = 15
|
| 638 |
+
ws_labor.column_dimensions['C'].width = 15
|
| 639 |
+
ws_labor.column_dimensions['D'].width = 15
|
| 640 |
+
|
| 641 |
+
# Başlık
|
| 642 |
+
ws_labor.merge_cells('A1:D1')
|
| 643 |
+
ws_labor['A1'] = "İŞÇİLİK MALİYETLERİ"
|
| 644 |
+
ws_labor['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 645 |
+
ws_labor['A1'].alignment = center_align
|
| 646 |
+
|
| 647 |
+
# Tablo başlıkları
|
| 648 |
+
ws_labor['A3'] = "Pozisyon"
|
| 649 |
+
ws_labor['B3'] = "Saat Ücreti (TL)"
|
| 650 |
+
ws_labor['C3'] = "Süre (Saat)"
|
| 651 |
+
ws_labor['D3'] = "Toplam (TL)"
|
| 652 |
+
|
| 653 |
+
for col in ['A', 'B', 'C', 'D']:
|
| 654 |
+
ws_labor[f'{col}3'].font = header_font
|
| 655 |
+
ws_labor[f'{col}3'].fill = header_fill
|
| 656 |
+
ws_labor[f'{col}3'].border = thin_border
|
| 657 |
+
|
| 658 |
+
# Verileri tabloya ekle
|
| 659 |
+
row = 4
|
| 660 |
+
for item in data['iscilik_maliyetleri']:
|
| 661 |
+
ws_labor[f'A{row}'] = item.get('pozisyon', '')
|
| 662 |
+
ws_labor[f'B{row}'] = item.get('saat_ucreti', 0)
|
| 663 |
+
ws_labor[f'C{row}'] = item.get('sure', 0)
|
| 664 |
+
ws_labor[f'D{row}'] = item.get('toplam', 0)
|
| 665 |
+
|
| 666 |
+
for col in ['A', 'B', 'C', 'D']:
|
| 667 |
+
ws_labor[f'{col}{row}'].font = normal_font
|
| 668 |
+
ws_labor[f'{col}{row}'].border = thin_border
|
| 669 |
+
|
| 670 |
+
row += 1
|
| 671 |
+
|
| 672 |
+
# Toplam satırı
|
| 673 |
+
if 'iscilik_toplam' in data:
|
| 674 |
+
ws_labor[f'A{row}'] = "TOPLAM"
|
| 675 |
+
ws_labor[f'D{row}'] = data['iscilik_toplam']
|
| 676 |
+
|
| 677 |
+
ws_labor.merge_cells(f'A{row}:C{row}')
|
| 678 |
+
|
| 679 |
+
for col in ['A', 'D']:
|
| 680 |
+
ws_labor[f'{col}{row}'].font = total_font
|
| 681 |
+
ws_labor[f'{col}{row}'].fill = total_fill
|
| 682 |
+
ws_labor[f'{col}{row}'].border = thin_border
|
| 683 |
+
|
| 684 |
+
# Malzeme maliyetleri sayfası
|
| 685 |
+
if 'malzeme_maliyetleri' in data:
|
| 686 |
+
ws_material.column_dimensions['A'].width = 30
|
| 687 |
+
ws_material.column_dimensions['B'].width = 15
|
| 688 |
+
ws_material.column_dimensions['C'].width = 15
|
| 689 |
+
ws_material.column_dimensions['D'].width = 15
|
| 690 |
+
ws_material.column_dimensions['E'].width = 15
|
| 691 |
+
|
| 692 |
+
# Başlık
|
| 693 |
+
ws_material.merge_cells('A1:E1')
|
| 694 |
+
ws_material['A1'] = "MALZEME MALİYETLERİ"
|
| 695 |
+
ws_material['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 696 |
+
ws_material['A1'].alignment = center_align
|
| 697 |
+
|
| 698 |
+
# Tablo başlıkları
|
| 699 |
+
ws_material['A3'] = "Malzeme"
|
| 700 |
+
ws_material['B3'] = "Birim Fiyat (TL)"
|
| 701 |
+
ws_material['C3'] = "Miktar"
|
| 702 |
+
ws_material['D3'] = "Birim"
|
| 703 |
+
ws_material['E3'] = "Toplam (TL)"
|
| 704 |
+
|
| 705 |
+
for col in ['A', 'B', 'C', 'D', 'E']:
|
| 706 |
+
ws_material[f'{col}3'].font = header_font
|
| 707 |
+
ws_material[f'{col}3'].fill = header_fill
|
| 708 |
+
ws_material[f'{col}3'].border = thin_border
|
| 709 |
+
|
| 710 |
+
# Verileri tabloya ekle
|
| 711 |
+
row = 4
|
| 712 |
+
for item in data['malzeme_maliyetleri']:
|
| 713 |
+
ws_material[f'A{row}'] = item.get('malzeme', '')
|
| 714 |
+
ws_material[f'B{row}'] = item.get('birim_fiyat', 0)
|
| 715 |
+
ws_material[f'C{row}'] = item.get('miktar', 0)
|
| 716 |
+
ws_material[f'D{row}'] = item.get('birim', '')
|
| 717 |
+
ws_material[f'E{row}'] = item.get('toplam', 0)
|
| 718 |
+
|
| 719 |
+
for col in ['A', 'B', 'C', 'D', 'E']:
|
| 720 |
+
ws_material[f'{col}{row}'].font = normal_font
|
| 721 |
+
ws_material[f'{col}{row}'].border = thin_border
|
| 722 |
+
|
| 723 |
+
row += 1
|
| 724 |
+
|
| 725 |
+
# Toplam satırı
|
| 726 |
+
if 'malzeme_toplam' in data:
|
| 727 |
+
ws_material[f'A{row}'] = "TOPLAM"
|
| 728 |
+
ws_material[f'E{row}'] = data['malzeme_toplam']
|
| 729 |
+
|
| 730 |
+
ws_material.merge_cells(f'A{row}:D{row}')
|
| 731 |
+
|
| 732 |
+
for col in ['A', 'E']:
|
| 733 |
+
ws_material[f'{col}{row}'].font = total_font
|
| 734 |
+
ws_material[f'{col}{row}'].fill = total_fill
|
| 735 |
+
ws_material[f'{col}{row}'].border = thin_border
|
| 736 |
+
|
| 737 |
+
def _create_offer_excel(wb, data):
|
| 738 |
+
"""
|
| 739 |
+
Teklif Excel dokümanı oluşturur
|
| 740 |
+
"""
|
| 741 |
+
# Varsayılan sayfayı sil
|
| 742 |
+
if 'Sheet' in wb.sheetnames:
|
| 743 |
+
del wb['Sheet']
|
| 744 |
+
|
| 745 |
+
# Sayfalar oluştur
|
| 746 |
+
ws_offer = wb.create_sheet("Teklif")
|
| 747 |
+
ws_details = wb.create_sheet("Detaylar")
|
| 748 |
+
|
| 749 |
+
# Stil tanımlamaları
|
| 750 |
+
header_font = Font(name='Arial', size=12, bold=True)
|
| 751 |
+
normal_font = Font(name='Arial', size=11)
|
| 752 |
+
total_font = Font(name='Arial', size=11, bold=True)
|
| 753 |
+
|
| 754 |
+
header_fill = PatternFill(start_color="DDDDDD", end_color="DDDDDD", fill_type="solid")
|
| 755 |
+
total_fill = PatternFill(start_color="EEEEEE", end_color="EEEEEE", fill_type="solid")
|
| 756 |
+
|
| 757 |
+
center_align = Alignment(horizontal='center', vertical='center')
|
| 758 |
+
right_align = Alignment(horizontal='right', vertical='center')
|
| 759 |
+
|
| 760 |
+
thin_border = Border(
|
| 761 |
+
left=Side(style='thin'),
|
| 762 |
+
right=Side(style='thin'),
|
| 763 |
+
top=Side(style='thin'),
|
| 764 |
+
bottom=Side(style='thin')
|
| 765 |
+
)
|
| 766 |
+
|
| 767 |
+
# Teklif sayfası
|
| 768 |
+
ws_offer.column_dimensions['A'].width = 30
|
| 769 |
+
ws_offer.column_dimensions['B'].width = 15
|
| 770 |
+
ws_offer.column_dimensions['C'].width = 15
|
| 771 |
+
ws_offer.column_dimensions['D'].width = 15
|
| 772 |
+
ws_offer.column_dimensions['E'].width = 15
|
| 773 |
+
|
| 774 |
+
# Başlık
|
| 775 |
+
ws_offer.merge_cells('A1:E1')
|
| 776 |
+
ws_offer['A1'] = "TEKLİF"
|
| 777 |
+
ws_offer['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 778 |
+
ws_offer['A1'].alignment = center_align
|
| 779 |
+
|
| 780 |
+
# Tarih ve referans
|
| 781 |
+
ws_offer['D2'] = "Tarih:"
|
| 782 |
+
ws_offer['E2'] = datetime.now().strftime('%d.%m.%Y')
|
| 783 |
+
ws_offer['D2'].font = normal_font
|
| 784 |
+
ws_offer['E2'].font = normal_font
|
| 785 |
+
|
| 786 |
+
if 'referans_no' in data:
|
| 787 |
+
ws_offer['D3'] = "Referans No:"
|
| 788 |
+
ws_offer['E3'] = data['referans_no']
|
| 789 |
+
ws_offer['D3'].font = normal_font
|
| 790 |
+
ws_offer['E3'].font = normal_font
|
| 791 |
+
|
| 792 |
+
# Müşteri bilgileri
|
| 793 |
+
row = 5
|
| 794 |
+
if 'musteri_bilgileri' in data:
|
| 795 |
+
ws_offer['A4'] = "MÜŞTERİ BİLGİLERİ"
|
| 796 |
+
ws_offer['A4'].font = header_font
|
| 797 |
+
|
| 798 |
+
for key, value in data['musteri_bilgileri'].items():
|
| 799 |
+
ws_offer[f'A{row}'] = key
|
| 800 |
+
ws_offer[f'B{row}'] = value
|
| 801 |
+
ws_offer[f'A{row}'].font = normal_font
|
| 802 |
+
ws_offer[f'B{row}'].font = normal_font
|
| 803 |
+
row += 1
|
| 804 |
+
|
| 805 |
+
row += 1
|
| 806 |
+
|
| 807 |
+
# Proje bilgileri
|
| 808 |
+
if 'proje_bilgileri' in data:
|
| 809 |
+
ws_offer[f'A{row}'] = "PROJE BİLGİLERİ"
|
| 810 |
+
ws_offer[f'A{row}'].font = header_font
|
| 811 |
+
row += 1
|
| 812 |
+
|
| 813 |
+
for key, value in data['proje_bilgileri'].items():
|
| 814 |
+
ws_offer[f'A{row}'] = key
|
| 815 |
+
ws_offer[f'B{row}'] = value
|
| 816 |
+
ws_offer[f'A{row}'].font = normal_font
|
| 817 |
+
ws_offer[f'B{row}'].font = normal_font
|
| 818 |
+
row += 1
|
| 819 |
+
|
| 820 |
+
row += 1
|
| 821 |
+
|
| 822 |
+
# Teklif detayları
|
| 823 |
+
if 'teklif_detaylari' in data:
|
| 824 |
+
ws_offer[f'A{row}'] = "TEKLİF DETAYLARI"
|
| 825 |
+
ws_offer[f'A{row}'].font = header_font
|
| 826 |
+
row += 1
|
| 827 |
+
|
| 828 |
+
# Tablo başlıkları
|
| 829 |
+
ws_offer[f'A{row}'] = "Kalem"
|
| 830 |
+
ws_offer[f'B{row}'] = "Açıklama"
|
| 831 |
+
ws_offer[f'C{row}'] = "Miktar"
|
| 832 |
+
ws_offer[f'D{row}'] = "Birim Fiyat (TL)"
|
| 833 |
+
ws_offer[f'E{row}'] = "Toplam (TL)"
|
| 834 |
+
|
| 835 |
+
for col in ['A', 'B', 'C', 'D', 'E']:
|
| 836 |
+
ws_offer[f'{col}{row}'].font = header_font
|
| 837 |
+
ws_offer[f'{col}{row}'].fill = header_fill
|
| 838 |
+
ws_offer[f'{col}{row}'].border = thin_border
|
| 839 |
+
|
| 840 |
+
row += 1
|
| 841 |
+
|
| 842 |
+
# Verileri tabloya ekle
|
| 843 |
+
for item in data['teklif_detaylari']:
|
| 844 |
+
ws_offer[f'A{row}'] = item.get('kalem', '')
|
| 845 |
+
ws_offer[f'B{row}'] = item.get('aciklama', '')
|
| 846 |
+
ws_offer[f'C{row}'] = item.get('miktar', 0)
|
| 847 |
+
ws_offer[f'D{row}'] = item.get('birim_fiyat', 0)
|
| 848 |
+
ws_offer[f'E{row}'] = item.get('toplam', 0)
|
| 849 |
+
|
| 850 |
+
for col in ['A', 'B', 'C', 'D', 'E']:
|
| 851 |
+
ws_offer[f'{col}{row}'].font = normal_font
|
| 852 |
+
ws_offer[f'{col}{row}'].border = thin_border
|
| 853 |
+
|
| 854 |
+
row += 1
|
| 855 |
+
|
| 856 |
+
# Toplam satırı
|
| 857 |
+
if 'toplam_fiyat' in data:
|
| 858 |
+
ws_offer[f'A{row}'] = "TOPLAM"
|
| 859 |
+
ws_offer[f'E{row}'] = data['toplam_fiyat']
|
| 860 |
+
|
| 861 |
+
ws_offer.merge_cells(f'A{row}:D{row}')
|
| 862 |
+
|
| 863 |
+
for col in ['A', 'E']:
|
| 864 |
+
ws_offer[f'{col}{row}'].font = total_font
|
| 865 |
+
ws_offer[f'{col}{row}'].fill = total_fill
|
| 866 |
+
ws_offer[f'{col}{row}'].border = thin_border
|
| 867 |
+
|
| 868 |
+
# Detaylar sayfası
|
| 869 |
+
ws_details.column_dimensions['A'].width = 30
|
| 870 |
+
ws_details.column_dimensions['B'].width = 50
|
| 871 |
+
|
| 872 |
+
# Başlık
|
| 873 |
+
ws_details.merge_cells('A1:B1')
|
| 874 |
+
ws_details['A1'] = "TEKLİF DETAYLARI"
|
| 875 |
+
ws_details['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 876 |
+
ws_details['A1'].alignment = center_align
|
| 877 |
+
|
| 878 |
+
row = 3
|
| 879 |
+
|
| 880 |
+
# Ödeme ve teslimat koşulları
|
| 881 |
+
if 'odeme_kosullari' in data or 'teslimat_kosullari' in data:
|
| 882 |
+
ws_details[f'A{row}'] = "ÖDEME VE TESLİMAT KOŞULLARI"
|
| 883 |
+
ws_details[f'A{row}'].font = header_font
|
| 884 |
+
row += 1
|
| 885 |
+
|
| 886 |
+
if 'odeme_kosullari' in data:
|
| 887 |
+
ws_details[f'A{row}'] = "Ödeme Koşulları:"
|
| 888 |
+
ws_details[f'A{row}'].font = Font(name='Arial', size=11, bold=True)
|
| 889 |
+
row += 1
|
| 890 |
+
|
| 891 |
+
ws_details[f'A{row}'] = data['odeme_kosullari']
|
| 892 |
+
ws_details[f'A{row}'].font = normal_font
|
| 893 |
+
row += 2
|
| 894 |
+
|
| 895 |
+
if 'teslimat_kosullari' in data:
|
| 896 |
+
ws_details[f'A{row}'] = "Teslimat Koşulları:"
|
| 897 |
+
ws_details[f'A{row}'].font = Font(name='Arial', size=11, bold=True)
|
| 898 |
+
row += 1
|
| 899 |
+
|
| 900 |
+
ws_details[f'A{row}'] = data['teslimat_kosullari']
|
| 901 |
+
ws_details[f'A{row}'].font = normal_font
|
| 902 |
+
row += 2
|
| 903 |
+
|
| 904 |
+
# Geçerlilik süresi
|
| 905 |
+
if 'gecerlilik_suresi' in data:
|
| 906 |
+
ws_details[f'A{row}'] = "GEÇERLİLİK SÜRESİ"
|
| 907 |
+
ws_details[f'A{row}'].font = header_font
|
| 908 |
+
row += 1
|
| 909 |
+
|
| 910 |
+
ws_details[f'A{row}'] = data['gecerlilik_suresi']
|
| 911 |
+
ws_details[f'A{row}'].font = normal_font
|
| 912 |
+
row += 2
|
| 913 |
+
|
| 914 |
+
# Firma bilgileri
|
| 915 |
+
if 'firma_bilgileri' in data:
|
| 916 |
+
ws_details[f'A{row}'] = "FİRMA BİLGİLERİ"
|
| 917 |
+
ws_details[f'A{row}'].font = header_font
|
| 918 |
+
row += 1
|
| 919 |
+
|
| 920 |
+
for key, value in data['firma_bilgileri'].items():
|
| 921 |
+
ws_details[f'A{row}'] = value
|
| 922 |
+
ws_details[f'A{row}'].font = normal_font
|
| 923 |
+
row += 1
|
| 924 |
+
|
| 925 |
+
def _create_default_excel(wb, data):
|
| 926 |
+
"""
|
| 927 |
+
Varsayılan Excel dokümanı oluşturur
|
| 928 |
+
"""
|
| 929 |
+
# Varsayılan sayfayı al
|
| 930 |
+
ws = wb.active
|
| 931 |
+
ws.title = "Doküman"
|
| 932 |
+
|
| 933 |
+
# Stil tanımlamaları
|
| 934 |
+
header_font = Font(name='Arial', size=12, bold=True)
|
| 935 |
+
normal_font = Font(name='Arial', size=11)
|
| 936 |
+
|
| 937 |
+
center_align = Alignment(horizontal='center', vertical='center')
|
| 938 |
+
|
| 939 |
+
# Başlık
|
| 940 |
+
ws.merge_cells('A1:D1')
|
| 941 |
+
ws['A1'] = data.get('baslik', 'DOKÜMAN')
|
| 942 |
+
ws['A1'].font = Font(name='Arial', size=14, bold=True)
|
| 943 |
+
ws['A1'].alignment = center_align
|
| 944 |
+
|
| 945 |
+
# Tarih
|
| 946 |
+
ws['D2'] = "Tarih:"
|
| 947 |
+
ws['E2'] = datetime.now().strftime('%d.%m.%Y')
|
| 948 |
+
ws['D2'].font = normal_font
|
| 949 |
+
ws['E2'].font = normal_font
|
| 950 |
+
|
| 951 |
+
# İçerik
|
| 952 |
+
row = 4
|
| 953 |
+
if 'icerik' in data:
|
| 954 |
+
for item in data['icerik']:
|
| 955 |
+
if item.get('tip') == 'baslik':
|
| 956 |
+
ws[f'A{row}'] = item.get('metin', '')
|
| 957 |
+
ws[f'A{row}'].font = header_font
|
| 958 |
+
row += 1
|
| 959 |
+
elif item.get('tip') == 'paragraf':
|
| 960 |
+
ws[f'A{row}'] = item.get('metin', '')
|
| 961 |
+
ws[f'A{row}'].font = normal_font
|
| 962 |
+
row += 1
|
| 963 |
+
elif item.get('tip') == 'liste':
|
| 964 |
+
for liste_item in item.get('liste_ogeler', []):
|
| 965 |
+
ws[f'A{row}'] = "• " + liste_item
|
| 966 |
+
ws[f'A{row}'].font = normal_font
|
| 967 |
+
row += 1
|
| 968 |
+
|
| 969 |
+
def save_document_to_db(db: Session, filename, content_type, file_path, file_size, document_type, template_type):
|
| 970 |
+
"""
|
| 971 |
+
Oluşturulan doküman bilgilerini veritabanına kaydet
|
| 972 |
+
"""
|
| 973 |
+
document = Document(
|
| 974 |
+
filename=filename,
|
| 975 |
+
content_type=content_type,
|
| 976 |
+
file_path=file_path,
|
| 977 |
+
file_size=file_size,
|
| 978 |
+
document_type=document_type,
|
| 979 |
+
template_type=template_type
|
| 980 |
+
)
|
| 981 |
+
|
| 982 |
+
db.add(document)
|
| 983 |
+
db.commit()
|
| 984 |
+
db.refresh(document)
|
| 985 |
+
return document
|
app/document_processor.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
import pytesseract
|
| 4 |
+
from PIL import Image
|
| 5 |
+
from pdf2image import convert_from_path
|
| 6 |
+
import docx
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
from sqlalchemy.orm import Session
|
| 9 |
+
from app.db.models import Document
|
| 10 |
+
from app.agent import create_agent
|
| 11 |
+
|
| 12 |
+
# Desteklenen dosya türleri
|
| 13 |
+
SUPPORTED_CONTENT_TYPES = {
|
| 14 |
+
'application/pdf': 'pdf',
|
| 15 |
+
'image/jpeg': 'image',
|
| 16 |
+
'image/png': 'image',
|
| 17 |
+
'image/tiff': 'image',
|
| 18 |
+
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'docx'
|
| 19 |
+
}
|
| 20 |
+
|
| 21 |
+
# Dosya yükleme dizini
|
| 22 |
+
UPLOAD_DIR = os.path.join(os.getcwd(), 'uploads')
|
| 23 |
+
os.makedirs(UPLOAD_DIR, exist_ok=True)
|
| 24 |
+
|
| 25 |
+
def save_uploaded_file(file, filename):
|
| 26 |
+
"""Yüklenen dosyayı kaydet"""
|
| 27 |
+
file_path = os.path.join(UPLOAD_DIR, filename)
|
| 28 |
+
with open(file_path, "wb") as buffer:
|
| 29 |
+
buffer.write(file.file.read())
|
| 30 |
+
return file_path
|
| 31 |
+
|
| 32 |
+
def extract_text_from_pdf(file_path):
|
| 33 |
+
"""PDF dosyasından metin çıkar"""
|
| 34 |
+
try:
|
| 35 |
+
# PDF'i görüntülere dönüştür
|
| 36 |
+
images = convert_from_path(file_path)
|
| 37 |
+
text = ""
|
| 38 |
+
|
| 39 |
+
# Her sayfadan metin çıkar
|
| 40 |
+
for img in images:
|
| 41 |
+
text += pytesseract.image_to_string(img, lang='tur') + "\n"
|
| 42 |
+
|
| 43 |
+
return text
|
| 44 |
+
except Exception as e:
|
| 45 |
+
print(f"PDF işleme hatası: {str(e)}")
|
| 46 |
+
return ""
|
| 47 |
+
|
| 48 |
+
def extract_text_from_image(file_path):
|
| 49 |
+
"""Görüntüden metin çıkar"""
|
| 50 |
+
try:
|
| 51 |
+
img = Image.open(file_path)
|
| 52 |
+
text = pytesseract.image_to_string(img, lang='tur')
|
| 53 |
+
return text
|
| 54 |
+
except Exception as e:
|
| 55 |
+
print(f"Görüntü işleme hatası: {str(e)}")
|
| 56 |
+
return ""
|
| 57 |
+
|
| 58 |
+
def extract_text_from_docx(file_path):
|
| 59 |
+
"""DOCX dosyasından metin çıkar"""
|
| 60 |
+
try:
|
| 61 |
+
doc = docx.Document(file_path)
|
| 62 |
+
text = "\n".join([paragraph.text for paragraph in doc.paragraphs])
|
| 63 |
+
return text
|
| 64 |
+
except Exception as e:
|
| 65 |
+
print(f"DOCX işleme hatası: {str(e)}")
|
| 66 |
+
return ""
|
| 67 |
+
|
| 68 |
+
def process_document(file_path, content_type):
|
| 69 |
+
"""Belge türüne göre metin çıkar"""
|
| 70 |
+
file_type = SUPPORTED_CONTENT_TYPES.get(content_type)
|
| 71 |
+
|
| 72 |
+
if not file_type:
|
| 73 |
+
return "Desteklenmeyen dosya türü"
|
| 74 |
+
|
| 75 |
+
if file_type == 'pdf':
|
| 76 |
+
return extract_text_from_pdf(file_path)
|
| 77 |
+
elif file_type == 'image':
|
| 78 |
+
return extract_text_from_image(file_path)
|
| 79 |
+
elif file_type == 'docx':
|
| 80 |
+
return extract_text_from_docx(file_path)
|
| 81 |
+
|
| 82 |
+
return ""
|
| 83 |
+
|
| 84 |
+
def analyze_document_content(content, db: Session):
|
| 85 |
+
"""Belge içeriğini analiz et"""
|
| 86 |
+
try:
|
| 87 |
+
# Agent oluştur
|
| 88 |
+
agent_executor = create_agent(db)
|
| 89 |
+
|
| 90 |
+
# Analiz için prompt
|
| 91 |
+
prompt = f"""Bu belgeyi analiz et ve aşağıdaki bilgileri çıkar:
|
| 92 |
+
1. Belgedeki maliyet hesaplamalarıyla ilgili tüm bilgiler
|
| 93 |
+
2. İşçilik maliyetleri
|
| 94 |
+
3. Malzeme maliyetleri
|
| 95 |
+
4. Kar marjı bilgileri
|
| 96 |
+
5. Toplam maliyet
|
| 97 |
+
|
| 98 |
+
Belge içeriği:
|
| 99 |
+
{content}
|
| 100 |
+
"""
|
| 101 |
+
|
| 102 |
+
# Agent'ı çalıştır
|
| 103 |
+
result = agent_executor.invoke({"input": prompt})
|
| 104 |
+
analysis = result["output"]
|
| 105 |
+
|
| 106 |
+
# Analiz sonucunu JSON formatında döndür
|
| 107 |
+
return json.dumps({
|
| 108 |
+
"analysis": analysis,
|
| 109 |
+
"analyzed_at": datetime.now().isoformat()
|
| 110 |
+
}, ensure_ascii=False)
|
| 111 |
+
except Exception as e:
|
| 112 |
+
print(f"Analiz hatası: {str(e)}")
|
| 113 |
+
return json.dumps({
|
| 114 |
+
"error": f"Analiz sırasında hata oluştu: {str(e)}",
|
| 115 |
+
"analyzed_at": datetime.now().isoformat()
|
| 116 |
+
}, ensure_ascii=False)
|
| 117 |
+
|
| 118 |
+
def save_document_to_db(db: Session, filename, content_type, file_path, file_size, content_text=None, analysis_result=None):
|
| 119 |
+
"""Belge bilgilerini veritabanına kaydet"""
|
| 120 |
+
document = Document(
|
| 121 |
+
filename=filename,
|
| 122 |
+
content_type=content_type,
|
| 123 |
+
file_path=file_path,
|
| 124 |
+
file_size=file_size,
|
| 125 |
+
content_text=content_text,
|
| 126 |
+
analysis_result=analysis_result
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
if analysis_result:
|
| 130 |
+
document.analyzed_at = datetime.now()
|
| 131 |
+
|
| 132 |
+
db.add(document)
|
| 133 |
+
db.commit()
|
| 134 |
+
db.refresh(document)
|
| 135 |
+
return document
|
app/main.py
ADDED
|
@@ -0,0 +1,351 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks, File, UploadFile, Form, Query
|
| 2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
+
from fastapi.responses import JSONResponse
|
| 4 |
+
from fastapi.staticfiles import StaticFiles
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
from typing import List, Optional, Dict, Any
|
| 8 |
+
import os
|
| 9 |
+
import shutil
|
| 10 |
+
import json
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
|
| 13 |
+
from app.db.database import get_db, engine
|
| 14 |
+
from app.db.models import Base, ChatHistory, Document
|
| 15 |
+
from app.agent import create_agent
|
| 16 |
+
from app import document_processor
|
| 17 |
+
from app import document_generator
|
| 18 |
+
|
| 19 |
+
# Create tables if they don't exist
|
| 20 |
+
Base.metadata.create_all(bind=engine)
|
| 21 |
+
|
| 22 |
+
# Initialize FastAPI app
|
| 23 |
+
app = FastAPI(
|
| 24 |
+
title="Endüstri Chatbot API",
|
| 25 |
+
description="Endüstriyel maliyet hesaplama için chatbot API",
|
| 26 |
+
version="0.1.0"
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
# Add CORS middleware
|
| 30 |
+
app.add_middleware(
|
| 31 |
+
CORSMiddleware,
|
| 32 |
+
allow_origins=["*"], # In production, replace with specific origins
|
| 33 |
+
allow_credentials=True,
|
| 34 |
+
allow_methods=["*"],
|
| 35 |
+
allow_headers=["*"],
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
# Define request and response models
|
| 39 |
+
class ChatRequest(BaseModel):
|
| 40 |
+
message: str
|
| 41 |
+
|
| 42 |
+
class ChatResponse(BaseModel):
|
| 43 |
+
response: str
|
| 44 |
+
|
| 45 |
+
class HealthResponse(BaseModel):
|
| 46 |
+
status: str
|
| 47 |
+
model_name: str
|
| 48 |
+
|
| 49 |
+
class DocumentResponse(BaseModel):
|
| 50 |
+
id: int
|
| 51 |
+
filename: str
|
| 52 |
+
content_type: str
|
| 53 |
+
file_size: int
|
| 54 |
+
document_type: Optional[str] = None
|
| 55 |
+
template_type: Optional[str] = None
|
| 56 |
+
uploaded_at: datetime
|
| 57 |
+
analyzed_at: Optional[datetime] = None
|
| 58 |
+
analysis_summary: Optional[str] = None
|
| 59 |
+
|
| 60 |
+
class DocumentAnalysisResponse(BaseModel):
|
| 61 |
+
id: int
|
| 62 |
+
filename: str
|
| 63 |
+
analysis_result: Dict[str, Any]
|
| 64 |
+
analyzed_at: datetime
|
| 65 |
+
|
| 66 |
+
class DocumentGenerateRequest(BaseModel):
|
| 67 |
+
document_type: str # 'word' veya 'excel'
|
| 68 |
+
template_type: str # 'maliyet_raporu', 'teklif', vs.
|
| 69 |
+
data: Dict[str, Any] # Doküman içeriği için veriler
|
| 70 |
+
|
| 71 |
+
class DocumentGenerateResponse(BaseModel):
|
| 72 |
+
id: int
|
| 73 |
+
filename: str
|
| 74 |
+
document_type: str
|
| 75 |
+
template_type: str
|
| 76 |
+
file_path: str
|
| 77 |
+
file_size: int
|
| 78 |
+
uploaded_at: datetime
|
| 79 |
+
|
| 80 |
+
# Save chat history to database
|
| 81 |
+
def save_chat_history(db: Session, user_input: str, assistant_response: str):
|
| 82 |
+
chat_entry = ChatHistory(
|
| 83 |
+
user_input=user_input,
|
| 84 |
+
assistant_response=assistant_response
|
| 85 |
+
)
|
| 86 |
+
db.add(chat_entry)
|
| 87 |
+
db.commit()
|
| 88 |
+
|
| 89 |
+
# Health check endpoint
|
| 90 |
+
@app.get("/health", response_model=HealthResponse)
|
| 91 |
+
def health_check():
|
| 92 |
+
return {
|
| 93 |
+
"status": "ok",
|
| 94 |
+
"model_name": os.getenv("MODEL_NAME", "bigscience/bloomz-560m")
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
# Chat endpoint
|
| 98 |
+
@app.post("/chat", response_model=ChatResponse)
|
| 99 |
+
async def chat(request: ChatRequest, background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
|
| 100 |
+
try:
|
| 101 |
+
# Create agent
|
| 102 |
+
agent_executor = create_agent(db)
|
| 103 |
+
|
| 104 |
+
# Run agent
|
| 105 |
+
result = agent_executor.invoke({"input": request.message})
|
| 106 |
+
response = result["output"]
|
| 107 |
+
|
| 108 |
+
# Save chat history in background
|
| 109 |
+
background_tasks.add_task(save_chat_history, db, request.message, response)
|
| 110 |
+
|
| 111 |
+
return {"response": response}
|
| 112 |
+
except Exception as e:
|
| 113 |
+
raise HTTPException(status_code=500, detail=f"Error processing request: {str(e)}")
|
| 114 |
+
|
| 115 |
+
# Get chat history endpoint
|
| 116 |
+
@app.get("/chat/history", response_model=List[dict])
|
| 117 |
+
def get_chat_history(skip: int = 0, limit: int = 10, db: Session = Depends(get_db)):
|
| 118 |
+
history = db.query(ChatHistory).order_by(ChatHistory.created_at.desc()).offset(skip).limit(limit).all()
|
| 119 |
+
return [{
|
| 120 |
+
"id": entry.id,
|
| 121 |
+
"user_input": entry.user_input,
|
| 122 |
+
"assistant_response": entry.assistant_response,
|
| 123 |
+
"created_at": entry.created_at
|
| 124 |
+
} for entry in history]
|
| 125 |
+
|
| 126 |
+
# Belge yükleme endpoint'i
|
| 127 |
+
@app.post("/documents/upload", response_model=DocumentResponse)
|
| 128 |
+
async def upload_document(
|
| 129 |
+
file: UploadFile = File(...),
|
| 130 |
+
analyze: bool = Form(False),
|
| 131 |
+
db: Session = Depends(get_db)
|
| 132 |
+
):
|
| 133 |
+
# Desteklenen dosya türlerini kontrol et
|
| 134 |
+
if file.content_type not in document_processor.SUPPORTED_CONTENT_TYPES:
|
| 135 |
+
raise HTTPException(
|
| 136 |
+
status_code=400,
|
| 137 |
+
detail=f"Desteklenmeyen dosya türü: {file.content_type}. Desteklenen türler: {list(document_processor.SUPPORTED_CONTENT_TYPES.keys())}"
|
| 138 |
+
)
|
| 139 |
+
|
| 140 |
+
try:
|
| 141 |
+
# Dosyayı kaydet
|
| 142 |
+
filename = f"{datetime.now().strftime('%Y%m%d%H%M%S')}_{file.filename}"
|
| 143 |
+
file_path = document_processor.save_uploaded_file(file, filename)
|
| 144 |
+
file_size = os.path.getsize(file_path)
|
| 145 |
+
|
| 146 |
+
# Metin çıkar ve analiz et
|
| 147 |
+
content_text = None
|
| 148 |
+
analysis_result = None
|
| 149 |
+
|
| 150 |
+
if analyze:
|
| 151 |
+
content_text = document_processor.process_document(file_path, file.content_type)
|
| 152 |
+
if content_text:
|
| 153 |
+
analysis_result = document_processor.analyze_document_content(content_text, db)
|
| 154 |
+
|
| 155 |
+
# Veritabanına kaydet
|
| 156 |
+
document = document_processor.save_document_to_db(
|
| 157 |
+
db, filename, file.content_type, file_path, file_size, content_text, analysis_result
|
| 158 |
+
)
|
| 159 |
+
|
| 160 |
+
# Yanıt oluştur
|
| 161 |
+
response = {
|
| 162 |
+
"id": document.id,
|
| 163 |
+
"filename": document.filename,
|
| 164 |
+
"content_type": document.content_type,
|
| 165 |
+
"file_size": document.file_size,
|
| 166 |
+
"uploaded_at": document.uploaded_at,
|
| 167 |
+
"analyzed_at": document.analyzed_at
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
if analysis_result:
|
| 171 |
+
response["analysis_summary"] = "Belge analiz edildi. /documents/{id}/analysis endpoint'inden sonuçları görüntüleyebilirsiniz."
|
| 172 |
+
|
| 173 |
+
return response
|
| 174 |
+
|
| 175 |
+
except Exception as e:
|
| 176 |
+
raise HTTPException(status_code=500, detail=f"Belge yükleme hatası: {str(e)}")
|
| 177 |
+
|
| 178 |
+
# Belge analiz endpoint'i
|
| 179 |
+
@app.post("/documents/{document_id}/analyze", response_model=DocumentAnalysisResponse)
|
| 180 |
+
async def analyze_document(
|
| 181 |
+
document_id: int,
|
| 182 |
+
db: Session = Depends(get_db)
|
| 183 |
+
):
|
| 184 |
+
# Belgeyi veritabanından al
|
| 185 |
+
document = db.query(Document).filter(Document.id == document_id).first()
|
| 186 |
+
if not document:
|
| 187 |
+
raise HTTPException(status_code=404, detail=f"Belge bulunamadı: {document_id}")
|
| 188 |
+
|
| 189 |
+
try:
|
| 190 |
+
# Eğer metin çıkarılmamışsa, çıkar
|
| 191 |
+
if not document.content_text:
|
| 192 |
+
document.content_text = document_processor.process_document(document.file_path, document.content_type)
|
| 193 |
+
db.commit()
|
| 194 |
+
|
| 195 |
+
# Metni analiz et
|
| 196 |
+
analysis_result = document_processor.analyze_document_content(document.content_text, db)
|
| 197 |
+
|
| 198 |
+
# Analiz sonucunu güncelle
|
| 199 |
+
document.analysis_result = analysis_result
|
| 200 |
+
document.analyzed_at = datetime.now()
|
| 201 |
+
db.commit()
|
| 202 |
+
|
| 203 |
+
# Yanıt oluştur
|
| 204 |
+
return {
|
| 205 |
+
"id": document.id,
|
| 206 |
+
"filename": document.filename,
|
| 207 |
+
"analysis_result": json.loads(document.analysis_result),
|
| 208 |
+
"analyzed_at": document.analyzed_at
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
except Exception as e:
|
| 212 |
+
raise HTTPException(status_code=500, detail=f"Belge analiz hatası: {str(e)}")
|
| 213 |
+
|
| 214 |
+
# Belge listesi endpoint'i
|
| 215 |
+
@app.get("/documents", response_model=List[DocumentResponse])
|
| 216 |
+
async def list_documents(
|
| 217 |
+
skip: int = 0,
|
| 218 |
+
limit: int = 10,
|
| 219 |
+
db: Session = Depends(get_db)
|
| 220 |
+
):
|
| 221 |
+
documents = db.query(Document).order_by(Document.uploaded_at.desc()).offset(skip).limit(limit).all()
|
| 222 |
+
|
| 223 |
+
return [
|
| 224 |
+
{
|
| 225 |
+
"id": doc.id,
|
| 226 |
+
"filename": doc.filename,
|
| 227 |
+
"content_type": doc.content_type,
|
| 228 |
+
"file_size": doc.file_size,
|
| 229 |
+
"uploaded_at": doc.uploaded_at,
|
| 230 |
+
"analyzed_at": doc.analyzed_at,
|
| 231 |
+
"analysis_summary": "Analiz sonuçları mevcut" if doc.analysis_result else None
|
| 232 |
+
}
|
| 233 |
+
for doc in documents
|
| 234 |
+
]
|
| 235 |
+
|
| 236 |
+
# Belge detayı endpoint'i
|
| 237 |
+
@app.get("/documents/{document_id}", response_model=DocumentResponse)
|
| 238 |
+
async def get_document(
|
| 239 |
+
document_id: int,
|
| 240 |
+
db: Session = Depends(get_db)
|
| 241 |
+
):
|
| 242 |
+
document = db.query(Document).filter(Document.id == document_id).first()
|
| 243 |
+
if not document:
|
| 244 |
+
raise HTTPException(status_code=404, detail=f"Belge bulunamadı: {document_id}")
|
| 245 |
+
|
| 246 |
+
return {
|
| 247 |
+
"id": document.id,
|
| 248 |
+
"filename": document.filename,
|
| 249 |
+
"content_type": document.content_type,
|
| 250 |
+
"file_size": document.file_size,
|
| 251 |
+
"uploaded_at": document.uploaded_at,
|
| 252 |
+
"analyzed_at": document.analyzed_at,
|
| 253 |
+
"analysis_summary": "Analiz sonuçları mevcut" if document.analysis_result else None
|
| 254 |
+
}
|
| 255 |
+
|
| 256 |
+
# Belge analiz sonucu endpoint'i
|
| 257 |
+
@app.get("/documents/{document_id}/analysis", response_model=DocumentAnalysisResponse)
|
| 258 |
+
async def get_document_analysis(
|
| 259 |
+
document_id: int,
|
| 260 |
+
db: Session = Depends(get_db)
|
| 261 |
+
):
|
| 262 |
+
document = db.query(Document).filter(Document.id == document_id).first()
|
| 263 |
+
if not document:
|
| 264 |
+
raise HTTPException(status_code=404, detail=f"Belge bulunamadı: {document_id}")
|
| 265 |
+
|
| 266 |
+
if not document.analysis_result:
|
| 267 |
+
raise HTTPException(status_code=404, detail=f"Belge henüz analiz edilmemiş: {document_id}")
|
| 268 |
+
|
| 269 |
+
return {
|
| 270 |
+
"id": document.id,
|
| 271 |
+
"filename": document.filename,
|
| 272 |
+
"analysis_result": json.loads(document.analysis_result),
|
| 273 |
+
"analyzed_at": document.analyzed_at
|
| 274 |
+
}
|
| 275 |
+
|
| 276 |
+
# Root endpoint
|
| 277 |
+
@app.get("/")
|
| 278 |
+
def read_root():
|
| 279 |
+
return {"message": "Endüstri Chatbot API'ye Hoş Geldiniz! /docs adresinden API dokümantasyonuna ulaşabilirsiniz."}
|
| 280 |
+
|
| 281 |
+
# Statik dosyaları sunmak için
|
| 282 |
+
app.mount("/uploads", StaticFiles(directory="uploads"), name="uploads")
|
| 283 |
+
|
| 284 |
+
# Doküman oluşturma endpoint'i
|
| 285 |
+
@app.post("/documents/generate", response_model=DocumentGenerateResponse)
|
| 286 |
+
async def generate_document(
|
| 287 |
+
request: DocumentGenerateRequest,
|
| 288 |
+
db: Session = Depends(get_db)
|
| 289 |
+
):
|
| 290 |
+
try:
|
| 291 |
+
# Doküman türüne göre oluştur
|
| 292 |
+
if request.document_type.lower() == 'word':
|
| 293 |
+
file_path = document_generator.create_word_document(request.data, request.template_type)
|
| 294 |
+
elif request.document_type.lower() == 'excel':
|
| 295 |
+
file_path = document_generator.create_excel_document(request.data, request.template_type)
|
| 296 |
+
else:
|
| 297 |
+
raise HTTPException(status_code=400, detail=f"Desteklenmeyen doküman türü: {request.document_type}. Desteklenen türler: word, excel")
|
| 298 |
+
|
| 299 |
+
# Dosya boyutunu al
|
| 300 |
+
file_size = os.path.getsize(file_path)
|
| 301 |
+
|
| 302 |
+
# Dosya adını al
|
| 303 |
+
filename = os.path.basename(file_path)
|
| 304 |
+
|
| 305 |
+
# İçerik türünü belirle
|
| 306 |
+
content_type = "application/vnd.openxmlformats-officedocument.wordprocessingml.document" if request.document_type.lower() == 'word' else "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
| 307 |
+
|
| 308 |
+
# Veritabanına kaydet
|
| 309 |
+
document = document_generator.save_document_to_db(
|
| 310 |
+
db, filename, content_type, file_path, file_size,
|
| 311 |
+
request.document_type.lower(), request.template_type
|
| 312 |
+
)
|
| 313 |
+
|
| 314 |
+
# Yanıt oluştur
|
| 315 |
+
return {
|
| 316 |
+
"id": document.id,
|
| 317 |
+
"filename": document.filename,
|
| 318 |
+
"document_type": document.document_type,
|
| 319 |
+
"template_type": document.template_type,
|
| 320 |
+
"file_path": document.file_path,
|
| 321 |
+
"file_size": document.file_size,
|
| 322 |
+
"uploaded_at": document.uploaded_at
|
| 323 |
+
}
|
| 324 |
+
|
| 325 |
+
except Exception as e:
|
| 326 |
+
raise HTTPException(status_code=500, detail=f"Doküman oluşturma hatası: {str(e)}")
|
| 327 |
+
|
| 328 |
+
# Doküman şablonları endpoint'i
|
| 329 |
+
@app.get("/documents/templates")
|
| 330 |
+
async def get_document_templates():
|
| 331 |
+
return {
|
| 332 |
+
"templates": [
|
| 333 |
+
{
|
| 334 |
+
"id": "maliyet_raporu",
|
| 335 |
+
"name": "Maliyet Raporu",
|
| 336 |
+
"description": "İşçilik ve malzeme maliyetlerini içeren detaylı maliyet raporu",
|
| 337 |
+
"supported_formats": ["word", "excel"]
|
| 338 |
+
},
|
| 339 |
+
{
|
| 340 |
+
"id": "teklif",
|
| 341 |
+
"name": "Teklif",
|
| 342 |
+
"description": "Müşteriye sunulacak resmi teklif dokümanı",
|
| 343 |
+
"supported_formats": ["word", "excel"]
|
| 344 |
+
}
|
| 345 |
+
]
|
| 346 |
+
}
|
| 347 |
+
|
| 348 |
+
# Run with: uvicorn app.main:app --reload
|
| 349 |
+
if __name__ == "__main__":
|
| 350 |
+
import uvicorn
|
| 351 |
+
uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)
|
app/prompts.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Agent prompt templates
|
| 2 |
+
|
| 3 |
+
PREFIX = """
|
| 4 |
+
<s>[INST] Sen endüstriyel maliyet hesaplama konusunda uzmanlaşmış bir asistansın.
|
| 5 |
+
Görevin, kullanıcıların işçilik maliyeti, malzeme maliyeti ve marj hesaplamaları konusundaki sorularına yardımcı olmak.
|
| 6 |
+
|
| 7 |
+
Aşağıdaki araçları kullanarak kullanıcıya yardımcı olabilirsin:
|
| 8 |
+
|
| 9 |
+
1. labor_cost: İşçilik maliyetini hesaplar (saat × saatlik_ücret)
|
| 10 |
+
2. material_cost: Malzeme maliyetini hesaplar (birim fiyat × miktar)
|
| 11 |
+
3. apply_margin: Toplam maliyete marj uygular (toplam_maliyet × (1+marj))
|
| 12 |
+
|
| 13 |
+
Kullanıcının sorusunu dikkatlice analiz et ve gerekli hesaplamaları yapmak için uygun araçları kullan.
|
| 14 |
+
Eğer kullanıcı bir araç için gerekli parametreleri belirtmemişse, nazikçe bu bilgileri sor.
|
| 15 |
+
|
| 16 |
+
Kullanıcı geçmişi:
|
| 17 |
+
{chat_history}
|
| 18 |
+
|
| 19 |
+
Soru: {input}
|
| 20 |
+
{agent_scratchpad} [/INST]
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
FORMAT_INSTRUCTIONS = """
|
| 24 |
+
Düşünce sürecini adım adım açıkla:
|
| 25 |
+
1. Kullanıcının ne istediğini anla
|
| 26 |
+
2. Hangi aracı kullanman gerektiğine karar ver
|
| 27 |
+
3. Gerekli parametreleri belirle
|
| 28 |
+
4. Aracı çalıştır ve sonucu yorumla
|
| 29 |
+
|
| 30 |
+
Cevabını şu formatta ver:
|
| 31 |
+
Düşünce: Kullanıcının ne istediğini ve nasıl yardımcı olacağını düşün
|
| 32 |
+
Eylem: Kullanacağın aracın adı (labor_cost, material_cost veya apply_margin)
|
| 33 |
+
Eylem Girdisi: {"parametre1": "değer1", "parametre2": "değer2"}
|
| 34 |
+
Gözlem: Aracın çıktısı
|
| 35 |
+
... (gerekirse daha fazla düşünce, eylem, gözlem adımı)
|
| 36 |
+
Cevap: Kullanıcıya son cevabın
|
| 37 |
+
|
| 38 |
+
Önemli: Tüm yanıtlarını Türkçe olarak ver ve hesaplamaları net bir şekilde açıkla.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
SUFFIX = """
|
| 42 |
+
Önemli: Kullanıcıya her zaman Türkçe yanıt ver. Teknik terimleri açıkla ve hesaplamaları adım adım göster.
|
| 43 |
+
Eğer bir hesaplama yapamıyorsan, nedenini açıkla ve kullanıcıdan hangi ek bilgilere ihtiyacın olduğunu belirt.
|
| 44 |
+
Yanıtlarını mümkün olduğunca net, doğru ve yardımcı olacak şekilde yapılandır.
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
# Intent extraction prompt
|
| 48 |
+
INTENT_EXTRACTION_PROMPT = """
|
| 49 |
+
Aşağıdaki kullanıcı mesajını analiz et ve kullanıcının niyetini belirle:
|
| 50 |
+
|
| 51 |
+
{user_input}
|
| 52 |
+
|
| 53 |
+
Aşağıdaki kategorilerden birini seç:
|
| 54 |
+
1. labor_cost_calculation: Kullanıcı işçilik maliyeti hesaplamak istiyor
|
| 55 |
+
2. material_cost_calculation: Kullanıcı malzeme maliyeti hesaplamak istiyor
|
| 56 |
+
3. margin_application: Kullanıcı marj uygulamak istiyor
|
| 57 |
+
4. general_question: Kullanıcı genel bir soru soruyor
|
| 58 |
+
5. greeting: Kullanıcı selamlama yapıyor
|
| 59 |
+
6. other: Diğer
|
| 60 |
+
|
| 61 |
+
Ayrıca, mesajdan aşağıdaki varlıkları çıkar (varsa):
|
| 62 |
+
- job_type: İş türü (örn. kaynakçı, elektrikçi)
|
| 63 |
+
- hours: Çalışma saati
|
| 64 |
+
- material_name: Malzeme adı
|
| 65 |
+
- quantity: Miktar
|
| 66 |
+
- total_cost: Toplam maliyet
|
| 67 |
+
- profile_name: Marj profili adı
|
| 68 |
+
|
| 69 |
+
Yanıtını JSON formatında ver.
|
| 70 |
+
"""
|
| 71 |
+
|
| 72 |
+
# Response generation prompt
|
| 73 |
+
RESPONSE_GENERATION_PROMPT = """
|
| 74 |
+
Kullanıcının mesajı: {user_input}
|
| 75 |
+
|
| 76 |
+
Niyet: {intent}
|
| 77 |
+
|
| 78 |
+
Varlıklar: {entities}
|
| 79 |
+
|
| 80 |
+
Araç sonucu: {tool_result}
|
| 81 |
+
|
| 82 |
+
Yukarıdaki bilgilere dayanarak, kullanıcıya nazik, yardımcı ve bilgilendirici bir yanıt oluştur.
|
| 83 |
+
Yanıt Türkçe olmalı ve teknik terimleri açıklamalıdır.
|
| 84 |
+
"""
|
app/seed.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from sqlalchemy.orm import Session
|
| 4 |
+
from app.db.database import SessionLocal, engine
|
| 5 |
+
from app.db.models import Base, LaborRate, Material, MarginProfile
|
| 6 |
+
|
| 7 |
+
def seed_database():
|
| 8 |
+
"""Seed the database with initial data"""
|
| 9 |
+
# Create a new session
|
| 10 |
+
db = SessionLocal()
|
| 11 |
+
|
| 12 |
+
try:
|
| 13 |
+
# Check if data already exists
|
| 14 |
+
if db.query(LaborRate).count() > 0:
|
| 15 |
+
print("Database already seeded. Skipping...")
|
| 16 |
+
return
|
| 17 |
+
|
| 18 |
+
print("Seeding database...")
|
| 19 |
+
|
| 20 |
+
# Seed labor rates
|
| 21 |
+
labor_rates = [
|
| 22 |
+
LaborRate(job_type="Kaynakçı", hourly_rate=150.0),
|
| 23 |
+
LaborRate(job_type="Elektrikçi", hourly_rate=175.0),
|
| 24 |
+
LaborRate(job_type="Mühendis", hourly_rate=300.0),
|
| 25 |
+
LaborRate(job_type="Teknisyen", hourly_rate=125.0),
|
| 26 |
+
LaborRate(job_type="CNC Operatörü", hourly_rate=200.0),
|
| 27 |
+
]
|
| 28 |
+
db.add_all(labor_rates)
|
| 29 |
+
|
| 30 |
+
# Seed materials
|
| 31 |
+
materials = [
|
| 32 |
+
Material(name="Çelik Sac (1mm)", unit="m²", unit_price=250.0),
|
| 33 |
+
Material(name="Alüminyum Profil", unit="metre", unit_price=120.0),
|
| 34 |
+
Material(name="Bakır Kablo", unit="metre", unit_price=45.0),
|
| 35 |
+
Material(name="PLC Kontrol Ünitesi", unit="adet", unit_price=5000.0),
|
| 36 |
+
Material(name="Sensör", unit="adet", unit_price=750.0),
|
| 37 |
+
Material(name="Motor (1kW)", unit="adet", unit_price=2500.0),
|
| 38 |
+
Material(name="Rulman", unit="adet", unit_price=180.0),
|
| 39 |
+
]
|
| 40 |
+
db.add_all(materials)
|
| 41 |
+
|
| 42 |
+
# Seed margin profiles
|
| 43 |
+
margin_profiles = [
|
| 44 |
+
MarginProfile(profile_name="Standart", margin_percentage=0.15), # 15%
|
| 45 |
+
MarginProfile(profile_name="Düşük Rekabet", margin_percentage=0.25), # 25%
|
| 46 |
+
MarginProfile(profile_name="Yüksek Rekabet", margin_percentage=0.10), # 10%
|
| 47 |
+
MarginProfile(profile_name="Stratejik Müşteri", margin_percentage=0.12), # 12%
|
| 48 |
+
MarginProfile(profile_name="Yeni Pazar", margin_percentage=0.08), # 8%
|
| 49 |
+
]
|
| 50 |
+
db.add_all(margin_profiles)
|
| 51 |
+
|
| 52 |
+
# Commit changes
|
| 53 |
+
db.commit()
|
| 54 |
+
print("Database seeded successfully!")
|
| 55 |
+
|
| 56 |
+
except Exception as e:
|
| 57 |
+
db.rollback()
|
| 58 |
+
print(f"Error seeding database: {e}")
|
| 59 |
+
raise
|
| 60 |
+
finally:
|
| 61 |
+
db.close()
|
| 62 |
+
|
| 63 |
+
if __name__ == "__main__":
|
| 64 |
+
# Create tables if they don't exist
|
| 65 |
+
Base.metadata.create_all(bind=engine)
|
| 66 |
+
|
| 67 |
+
# Seed database
|
| 68 |
+
seed_database()
|
app/tools/__pycache__/labor_cost.cpython-311.pyc
ADDED
|
Binary file (3.86 kB). View file
|
|
|
app/tools/__pycache__/margin.cpython-311.pyc
ADDED
|
Binary file (3.87 kB). View file
|
|
|
app/tools/__pycache__/material_cost.cpython-311.pyc
ADDED
|
Binary file (3.97 kB). View file
|
|
|
app/tools/labor_cost.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain.tools import BaseTool
|
| 2 |
+
from pydantic import BaseModel, Field
|
| 3 |
+
from typing import Optional, Type
|
| 4 |
+
from sqlalchemy.orm import Session
|
| 5 |
+
from app.db.models import LaborRate
|
| 6 |
+
|
| 7 |
+
class LaborCostInput(BaseModel):
|
| 8 |
+
"""Input for labor cost calculation"""
|
| 9 |
+
job_type: str = Field(..., description="The type of job (e.g., welder, electrician, engineer)")
|
| 10 |
+
hours: float = Field(..., description="Number of hours worked")
|
| 11 |
+
|
| 12 |
+
class LaborCostTool(BaseTool):
|
| 13 |
+
"""Tool for calculating labor cost based on job type and hours worked"""
|
| 14 |
+
name: str = "labor_cost"
|
| 15 |
+
description: str = "Calculate labor cost by multiplying hours worked by the hourly rate for a specific job type"
|
| 16 |
+
args_schema: Type[BaseModel] = LaborCostInput
|
| 17 |
+
db: Session
|
| 18 |
+
|
| 19 |
+
def __init__(self, db: Session):
|
| 20 |
+
"""Initialize with database session"""
|
| 21 |
+
super().__init__()
|
| 22 |
+
self.db = db
|
| 23 |
+
|
| 24 |
+
def _run(self, job_type: str, hours: float) -> str:
|
| 25 |
+
"""Calculate labor cost"""
|
| 26 |
+
# Find the labor rate for the job type
|
| 27 |
+
labor_rate = self.db.query(LaborRate).filter(LaborRate.job_type == job_type).first()
|
| 28 |
+
|
| 29 |
+
if not labor_rate:
|
| 30 |
+
return f"Error: No labor rate found for job type '{job_type}'. Available job types: {', '.join([lr.job_type for lr in self.db.query(LaborRate).all()])}"
|
| 31 |
+
|
| 32 |
+
# Calculate the cost
|
| 33 |
+
cost = labor_rate.hourly_rate * hours
|
| 34 |
+
|
| 35 |
+
return f"Labor cost for {hours} hours of {job_type} work at {labor_rate.hourly_rate} {labor_rate.currency}/hour: {cost:.2f} {labor_rate.currency}"
|
| 36 |
+
|
| 37 |
+
async def _arun(self, job_type: str, hours: float) -> str:
|
| 38 |
+
"""Async implementation of labor cost calculation"""
|
| 39 |
+
return self._run(job_type=job_type, hours=hours)
|
app/tools/margin.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain.tools import BaseTool
|
| 2 |
+
from pydantic import BaseModel, Field
|
| 3 |
+
from typing import Optional, Type
|
| 4 |
+
from sqlalchemy.orm import Session
|
| 5 |
+
from app.db.models import MarginProfile
|
| 6 |
+
|
| 7 |
+
class MarginInput(BaseModel):
|
| 8 |
+
"""Input for margin calculation"""
|
| 9 |
+
total_cost: float = Field(..., description="The total cost to apply margin to")
|
| 10 |
+
profile_name: str = Field(..., description="The margin profile name to use")
|
| 11 |
+
|
| 12 |
+
class MarginTool(BaseTool):
|
| 13 |
+
"""Tool for applying margin to a total cost"""
|
| 14 |
+
name: str = "apply_margin"
|
| 15 |
+
description: str = "Apply a margin to a total cost based on a margin profile"
|
| 16 |
+
args_schema: Type[BaseModel] = MarginInput
|
| 17 |
+
db: Session
|
| 18 |
+
|
| 19 |
+
def __init__(self, db: Session):
|
| 20 |
+
"""Initialize with database session"""
|
| 21 |
+
super().__init__()
|
| 22 |
+
self.db = db
|
| 23 |
+
|
| 24 |
+
def _run(self, total_cost: float, profile_name: str) -> str:
|
| 25 |
+
"""Apply margin to total cost"""
|
| 26 |
+
# Find the margin profile by name
|
| 27 |
+
margin_profile = self.db.query(MarginProfile).filter(MarginProfile.profile_name == profile_name).first()
|
| 28 |
+
|
| 29 |
+
if not margin_profile:
|
| 30 |
+
return f"Error: No margin profile found with name '{profile_name}'. Available profiles: {', '.join([mp.profile_name for mp in self.db.query(MarginProfile).all()])}"
|
| 31 |
+
|
| 32 |
+
# Calculate the final price with margin
|
| 33 |
+
final_price = total_cost * (1 + margin_profile.margin_percentage)
|
| 34 |
+
margin_amount = final_price - total_cost
|
| 35 |
+
margin_percentage = margin_profile.margin_percentage * 100
|
| 36 |
+
|
| 37 |
+
return f"Applied {margin_percentage:.1f}% margin to {total_cost:.2f}: Final price is {final_price:.2f} (margin amount: {margin_amount:.2f})"
|
| 38 |
+
|
| 39 |
+
async def _arun(self, total_cost: float, profile_name: str) -> str:
|
| 40 |
+
"""Async implementation of margin application"""
|
| 41 |
+
return self._run(total_cost=total_cost, profile_name=profile_name)
|
app/tools/material_cost.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain.tools import BaseTool
|
| 2 |
+
from pydantic import BaseModel, Field
|
| 3 |
+
from typing import Optional, Type
|
| 4 |
+
from sqlalchemy.orm import Session
|
| 5 |
+
from app.db.models import Material
|
| 6 |
+
|
| 7 |
+
class MaterialCostInput(BaseModel):
|
| 8 |
+
"""Input for material cost calculation"""
|
| 9 |
+
material_name: str = Field(..., description="The name of the material")
|
| 10 |
+
quantity: float = Field(..., description="Quantity of material needed")
|
| 11 |
+
|
| 12 |
+
class MaterialCostTool(BaseTool):
|
| 13 |
+
"""Tool for calculating material cost based on material name and quantity"""
|
| 14 |
+
name: str = "material_cost"
|
| 15 |
+
description: str = "Calculate material cost by multiplying the unit price by the quantity for a specific material"
|
| 16 |
+
args_schema: Type[BaseModel] = MaterialCostInput
|
| 17 |
+
db: Session
|
| 18 |
+
|
| 19 |
+
def __init__(self, db: Session):
|
| 20 |
+
"""Initialize with database session"""
|
| 21 |
+
super().__init__()
|
| 22 |
+
self.db = db
|
| 23 |
+
|
| 24 |
+
def _run(self, material_name: str, quantity: float) -> str:
|
| 25 |
+
"""Calculate material cost"""
|
| 26 |
+
# Find the material by name
|
| 27 |
+
material = self.db.query(Material).filter(Material.name == material_name).first()
|
| 28 |
+
|
| 29 |
+
if not material:
|
| 30 |
+
return f"Error: No material found with name '{material_name}'. Available materials: {', '.join([m.name for m in self.db.query(Material).all()])}"
|
| 31 |
+
|
| 32 |
+
# Calculate the cost
|
| 33 |
+
cost = material.unit_price * quantity
|
| 34 |
+
|
| 35 |
+
return f"Material cost for {quantity} {material.unit} of {material_name} at {material.unit_price} {material.currency}/{material.unit}: {cost:.2f} {material.currency}"
|
| 36 |
+
|
| 37 |
+
async def _arun(self, material_name: str, quantity: float) -> str:
|
| 38 |
+
"""Async implementation of material cost calculation"""
|
| 39 |
+
return self._run(material_name=material_name, quantity=quantity)
|
config.json
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "HuggingFaceH4/zephyr-7b-beta",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"MistralForCausalLM"
|
| 5 |
+
],
|
| 6 |
+
"attention_dropout": 0.0,
|
| 7 |
+
"bos_token_id": 1,
|
| 8 |
+
"eos_token_id": 2,
|
| 9 |
+
"hidden_act": "silu",
|
| 10 |
+
"hidden_size": 4096,
|
| 11 |
+
"initializer_range": 0.02,
|
| 12 |
+
"intermediate_size": 14336,
|
| 13 |
+
"max_position_embeddings": 32768,
|
| 14 |
+
"model_type": "mistral",
|
| 15 |
+
"num_attention_heads": 32,
|
| 16 |
+
"num_hidden_layers": 32,
|
| 17 |
+
"num_key_value_heads": 8,
|
| 18 |
+
"rms_norm_eps": 1e-05,
|
| 19 |
+
"rope_theta": 10000.0,
|
| 20 |
+
"sliding_window": 4096,
|
| 21 |
+
"tie_word_embeddings": false,
|
| 22 |
+
"torch_dtype": "bfloat16",
|
| 23 |
+
"transformers_version": "4.34.0",
|
| 24 |
+
"use_cache": true,
|
| 25 |
+
"vocab_size": 32000,
|
| 26 |
+
"custom_pipeline_tag": "industrial-cost-calculation",
|
| 27 |
+
"task_specific": {
|
| 28 |
+
"domain": "industrial-cost-calculation",
|
| 29 |
+
"language": "tr",
|
| 30 |
+
"tools": [
|
| 31 |
+
"labor_cost",
|
| 32 |
+
"material_cost",
|
| 33 |
+
"margin_calculation"
|
| 34 |
+
],
|
| 35 |
+
"features": [
|
| 36 |
+
"document_processing",
|
| 37 |
+
"cost_analysis",
|
| 38 |
+
"report_generation"
|
| 39 |
+
]
|
| 40 |
+
}
|
| 41 |
+
}
|
docker-compose.yml
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version: '3.8'
|
| 2 |
+
|
| 3 |
+
services:
|
| 4 |
+
db:
|
| 5 |
+
image: postgres:15
|
| 6 |
+
environment:
|
| 7 |
+
POSTGRES_USER: user
|
| 8 |
+
POSTGRES_PASSWORD: pass
|
| 9 |
+
POSTGRES_DB: costdb
|
| 10 |
+
volumes:
|
| 11 |
+
- postgres_data:/var/lib/postgresql/data
|
| 12 |
+
ports:
|
| 13 |
+
- "5432:5432"
|
| 14 |
+
healthcheck:
|
| 15 |
+
test: ["CMD-SHELL", "pg_isready -U user -d costdb"]
|
| 16 |
+
interval: 5s
|
| 17 |
+
timeout: 5s
|
| 18 |
+
retries: 5
|
| 19 |
+
|
| 20 |
+
app:
|
| 21 |
+
build: .
|
| 22 |
+
volumes:
|
| 23 |
+
- .:/app
|
| 24 |
+
ports:
|
| 25 |
+
- "8000:8000"
|
| 26 |
+
environment:
|
| 27 |
+
- DATABASE_URL=postgresql://user:pass@db:5432/costdb
|
| 28 |
+
- MODEL_NAME=bigscience/bloomz-560m
|
| 29 |
+
depends_on:
|
| 30 |
+
db:
|
| 31 |
+
condition: service_healthy
|
| 32 |
+
|
| 33 |
+
volumes:
|
| 34 |
+
postgres_data:
|
example_usage.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""
|
| 3 |
+
Endüstri Chatbot Kullanım Örneği
|
| 4 |
+
|
| 5 |
+
Bu dosya, Endüstri Chatbot'un nasıl kullanılacağını gösterir.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import requests
|
| 9 |
+
import json
|
| 10 |
+
from typing import Dict, Any
|
| 11 |
+
|
| 12 |
+
class EndustriChatbotClient:
|
| 13 |
+
def __init__(self, base_url: str = "http://localhost:8000"):
|
| 14 |
+
self.base_url = base_url
|
| 15 |
+
|
| 16 |
+
def chat(self, message: str) -> str:
|
| 17 |
+
"""Chatbot ile sohbet et"""
|
| 18 |
+
response = requests.post(
|
| 19 |
+
f"{self.base_url}/chat",
|
| 20 |
+
json={"message": message}
|
| 21 |
+
)
|
| 22 |
+
return response.json()["response"]
|
| 23 |
+
|
| 24 |
+
def upload_document(self, file_path: str, analyze: bool = True) -> Dict[str, Any]:
|
| 25 |
+
"""Doküman yükle ve analiz et"""
|
| 26 |
+
with open(file_path, "rb") as f:
|
| 27 |
+
response = requests.post(
|
| 28 |
+
f"{self.base_url}/documents/upload",
|
| 29 |
+
files={"file": f},
|
| 30 |
+
data={"analyze": str(analyze).lower()}
|
| 31 |
+
)
|
| 32 |
+
return response.json()
|
| 33 |
+
|
| 34 |
+
def generate_report(self, document_type: str, template_type: str, data: Dict[str, Any]) -> Dict[str, Any]:
|
| 35 |
+
"""Rapor oluştur"""
|
| 36 |
+
response = requests.post(
|
| 37 |
+
f"{self.base_url}/documents/generate",
|
| 38 |
+
json={
|
| 39 |
+
"document_type": document_type,
|
| 40 |
+
"template_type": template_type,
|
| 41 |
+
"data": data
|
| 42 |
+
}
|
| 43 |
+
)
|
| 44 |
+
return response.json()
|
| 45 |
+
|
| 46 |
+
def get_health(self) -> Dict[str, Any]:
|
| 47 |
+
"""Sistem durumunu kontrol et"""
|
| 48 |
+
response = requests.get(f"{self.base_url}/health")
|
| 49 |
+
return response.json()
|
| 50 |
+
|
| 51 |
+
def main():
|
| 52 |
+
# Client oluştur
|
| 53 |
+
client = EndustriChatbotClient()
|
| 54 |
+
|
| 55 |
+
# Sistem durumunu kontrol et
|
| 56 |
+
print("Sistem durumu:", client.get_health())
|
| 57 |
+
|
| 58 |
+
# Basit sohbet örnekleri
|
| 59 |
+
questions = [
|
| 60 |
+
"5 saat kaynakçı işçiliği ne kadar tutar?",
|
| 61 |
+
"10 metre bakır kablo maliyeti nedir?",
|
| 62 |
+
"2500 TL'lik bir işe standart marj uygularsak fiyat ne olur?"
|
| 63 |
+
]
|
| 64 |
+
|
| 65 |
+
for question in questions:
|
| 66 |
+
print(f"\nSoru: {question}")
|
| 67 |
+
answer = client.chat(question)
|
| 68 |
+
print(f"Cevap: {answer}")
|
| 69 |
+
|
| 70 |
+
# Rapor oluşturma örneği
|
| 71 |
+
report_data = {
|
| 72 |
+
"proje_adi": "Fabrika Kurulumu",
|
| 73 |
+
"referans_no": "PRJ-2024-001",
|
| 74 |
+
"iscilik_maliyeti": 15000,
|
| 75 |
+
"malzeme_maliyeti": 25000,
|
| 76 |
+
"toplam_maliyet": 40000,
|
| 77 |
+
"kar_marji": 20,
|
| 78 |
+
"toplam_teklif": 48000,
|
| 79 |
+
"notlar": "Bu örnek bir maliyet hesaplamasıdır."
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
print("\nMaliyet raporu oluşturuluyor...")
|
| 83 |
+
report_result = client.generate_report("word", "maliyet_raporu", report_data)
|
| 84 |
+
print(f"Rapor oluşturuldu: {report_result}")
|
| 85 |
+
|
| 86 |
+
if __name__ == "__main__":
|
| 87 |
+
main()
|
generation_config.json
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_from_model_config": true,
|
| 3 |
+
"bos_token_id": 1,
|
| 4 |
+
"eos_token_id": 2,
|
| 5 |
+
"max_length": 32768,
|
| 6 |
+
"max_new_tokens": 512,
|
| 7 |
+
"min_length": 0,
|
| 8 |
+
"min_new_tokens": null,
|
| 9 |
+
"early_stopping": false,
|
| 10 |
+
"max_time": null,
|
| 11 |
+
"do_sample": true,
|
| 12 |
+
"num_beams": 1,
|
| 13 |
+
"num_beam_groups": 1,
|
| 14 |
+
"penalty_alpha": null,
|
| 15 |
+
"use_cache": true,
|
| 16 |
+
"temperature": 0.7,
|
| 17 |
+
"top_k": 50,
|
| 18 |
+
"top_p": 0.95,
|
| 19 |
+
"typical_p": 1.0,
|
| 20 |
+
"epsilon_cutoff": 0.0,
|
| 21 |
+
"eta_cutoff": 0.0,
|
| 22 |
+
"diversity_penalty": 0.0,
|
| 23 |
+
"repetition_penalty": 1.15,
|
| 24 |
+
"encoder_repetition_penalty": 1.0,
|
| 25 |
+
"length_penalty": 1.0,
|
| 26 |
+
"no_repeat_ngram_size": 0,
|
| 27 |
+
"bad_words_ids": null,
|
| 28 |
+
"force_words_ids": null,
|
| 29 |
+
"renormalize_logits": false,
|
| 30 |
+
"constraints": null,
|
| 31 |
+
"forced_bos_token_id": null,
|
| 32 |
+
"forced_eos_token_id": null,
|
| 33 |
+
"remove_invalid_values": false,
|
| 34 |
+
"exponential_decay_length_penalty": null,
|
| 35 |
+
"suppress_tokens": null,
|
| 36 |
+
"begin_suppress_tokens": null,
|
| 37 |
+
"forced_decoder_ids": null,
|
| 38 |
+
"sequence_bias": null,
|
| 39 |
+
"guidance_scale": null,
|
| 40 |
+
"low_memory": null,
|
| 41 |
+
"num_return_sequences": 1,
|
| 42 |
+
"output_attentions": false,
|
| 43 |
+
"output_hidden_states": false,
|
| 44 |
+
"output_scores": false,
|
| 45 |
+
"return_dict_in_generate": false,
|
| 46 |
+
"pad_token_id": null,
|
| 47 |
+
"decoder_start_token_id": null,
|
| 48 |
+
"transformers_version": "4.34.0",
|
| 49 |
+
"task_specific_params": {
|
| 50 |
+
"industrial_cost_calculation": {
|
| 51 |
+
"temperature": 0.7,
|
| 52 |
+
"top_p": 0.95,
|
| 53 |
+
"repetition_penalty": 1.15,
|
| 54 |
+
"max_new_tokens": 512,
|
| 55 |
+
"do_sample": true
|
| 56 |
+
},
|
| 57 |
+
"document_analysis": {
|
| 58 |
+
"temperature": 0.3,
|
| 59 |
+
"top_p": 0.9,
|
| 60 |
+
"repetition_penalty": 1.1,
|
| 61 |
+
"max_new_tokens": 1024,
|
| 62 |
+
"do_sample": true
|
| 63 |
+
},
|
| 64 |
+
"report_generation": {
|
| 65 |
+
"temperature": 0.5,
|
| 66 |
+
"top_p": 0.9,
|
| 67 |
+
"repetition_penalty": 1.2,
|
| 68 |
+
"max_new_tokens": 2048,
|
| 69 |
+
"do_sample": true
|
| 70 |
+
}
|
| 71 |
+
}
|
| 72 |
+
}
|
model_index.json
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_class_name": "EndustriChatbotPipeline",
|
| 3 |
+
"_diffusers_version": "0.21.0",
|
| 4 |
+
"_name_or_path": "HuggingFaceH4/zephyr-7b-beta",
|
| 5 |
+
"model_type": "industrial-cost-calculation",
|
| 6 |
+
"pipeline_tag": "text-generation",
|
| 7 |
+
"library_name": "transformers",
|
| 8 |
+
"tags": [
|
| 9 |
+
"industrial",
|
| 10 |
+
"cost-calculation",
|
| 11 |
+
"chatbot",
|
| 12 |
+
"langchain",
|
| 13 |
+
"fastapi",
|
| 14 |
+
"document-processing",
|
| 15 |
+
"turkish",
|
| 16 |
+
"english"
|
| 17 |
+
],
|
| 18 |
+
"language": ["tr", "en"],
|
| 19 |
+
"license": "apache-2.0",
|
| 20 |
+
"base_model": "HuggingFaceH4/zephyr-7b-beta",
|
| 21 |
+
"model_name": "Endüstri Chatbot",
|
| 22 |
+
"model_description": "Industrial cost calculation AI chatbot with document processing capabilities",
|
| 23 |
+
"version": "1.0.0",
|
| 24 |
+
"framework": {
|
| 25 |
+
"name": "transformers",
|
| 26 |
+
"version": "4.34.0"
|
| 27 |
+
},
|
| 28 |
+
"components": {
|
| 29 |
+
"text_encoder": {
|
| 30 |
+
"class_name": "LlamaTokenizer",
|
| 31 |
+
"config_file": "tokenizer_config.json"
|
| 32 |
+
},
|
| 33 |
+
"text_decoder": {
|
| 34 |
+
"class_name": "MistralForCausalLM",
|
| 35 |
+
"config_file": "config.json"
|
| 36 |
+
},
|
| 37 |
+
"agent_executor": {
|
| 38 |
+
"class_name": "LangChainAgent",
|
| 39 |
+
"tools": [
|
| 40 |
+
"labor_cost",
|
| 41 |
+
"material_cost",
|
| 42 |
+
"margin_calculation"
|
| 43 |
+
]
|
| 44 |
+
},
|
| 45 |
+
"document_processor": {
|
| 46 |
+
"class_name": "DocumentProcessor",
|
| 47 |
+
"supported_formats": [
|
| 48 |
+
"pdf",
|
| 49 |
+
"docx",
|
| 50 |
+
"xlsx",
|
| 51 |
+
"images"
|
| 52 |
+
]
|
| 53 |
+
},
|
| 54 |
+
"report_generator": {
|
| 55 |
+
"class_name": "DocumentGenerator",
|
| 56 |
+
"output_formats": [
|
| 57 |
+
"word",
|
| 58 |
+
"excel"
|
| 59 |
+
],
|
| 60 |
+
"templates": [
|
| 61 |
+
"maliyet_raporu",
|
| 62 |
+
"teklif"
|
| 63 |
+
]
|
| 64 |
+
}
|
| 65 |
+
},
|
| 66 |
+
"inference": {
|
| 67 |
+
"parameters": {
|
| 68 |
+
"max_new_tokens": 512,
|
| 69 |
+
"temperature": 0.7,
|
| 70 |
+
"top_p": 0.95,
|
| 71 |
+
"repetition_penalty": 1.15,
|
| 72 |
+
"do_sample": true
|
| 73 |
+
},
|
| 74 |
+
"hardware_requirements": {
|
| 75 |
+
"min_ram": "8GB",
|
| 76 |
+
"recommended_ram": "16GB",
|
| 77 |
+
"gpu": "optional",
|
| 78 |
+
"gpu_memory": "8GB+"
|
| 79 |
+
}
|
| 80 |
+
},
|
| 81 |
+
"deployment": {
|
| 82 |
+
"docker": {
|
| 83 |
+
"image": "endustri-chatbot:latest",
|
| 84 |
+
"port": 8000,
|
| 85 |
+
"environment": {
|
| 86 |
+
"MODEL_NAME": "HuggingFaceH4/zephyr-7b-beta",
|
| 87 |
+
"DATABASE_URL": "sqlite:///./chatbot.db"
|
| 88 |
+
}
|
| 89 |
+
},
|
| 90 |
+
"api": {
|
| 91 |
+
"framework": "fastapi",
|
| 92 |
+
"endpoints": [
|
| 93 |
+
"/chat",
|
| 94 |
+
"/documents/upload",
|
| 95 |
+
"/documents/generate",
|
| 96 |
+
"/documents/templates",
|
| 97 |
+
"/health"
|
| 98 |
+
]
|
| 99 |
+
}
|
| 100 |
+
},
|
| 101 |
+
"training": {
|
| 102 |
+
"base_model": "HuggingFaceH4/zephyr-7b-beta",
|
| 103 |
+
"fine_tuning": "none",
|
| 104 |
+
"domain_adaptation": "industrial-cost-calculation",
|
| 105 |
+
"languages": ["tr", "en"],
|
| 106 |
+
"dataset_size": "proprietary"
|
| 107 |
+
},
|
| 108 |
+
"evaluation": {
|
| 109 |
+
"metrics": {
|
| 110 |
+
"cost_calculation_accuracy": 0.95,
|
| 111 |
+
"response_time_avg": "1.5s",
|
| 112 |
+
"document_processing_success": 0.92,
|
| 113 |
+
"turkish_language_support": 0.98
|
| 114 |
+
},
|
| 115 |
+
"benchmarks": {
|
| 116 |
+
"industrial_cost_dataset": {
|
| 117 |
+
"accuracy": 0.95,
|
| 118 |
+
"f1_score": 0.93
|
| 119 |
+
},
|
| 120 |
+
"document_analysis_dataset": {
|
| 121 |
+
"precision": 0.91,
|
| 122 |
+
"recall": 0.89
|
| 123 |
+
}
|
| 124 |
+
}
|
| 125 |
+
},
|
| 126 |
+
"usage": {
|
| 127 |
+
"primary_use_cases": [
|
| 128 |
+
"Industrial cost calculation",
|
| 129 |
+
"Document analysis and processing",
|
| 130 |
+
"Automated report generation",
|
| 131 |
+
"Cost estimation and quotation"
|
| 132 |
+
],
|
| 133 |
+
"supported_industries": [
|
| 134 |
+
"Manufacturing",
|
| 135 |
+
"Construction",
|
| 136 |
+
"Engineering",
|
| 137 |
+
"Project Management"
|
| 138 |
+
]
|
| 139 |
+
},
|
| 140 |
+
"limitations": [
|
| 141 |
+
"Specialized for industrial cost calculations",
|
| 142 |
+
"Primarily optimized for Turkish language",
|
| 143 |
+
"Requires domain-specific knowledge for accurate results",
|
| 144 |
+
"Performance depends on quality of input documents"
|
| 145 |
+
],
|
| 146 |
+
"ethical_considerations": {
|
| 147 |
+
"bias": "Model may have bias towards Turkish industrial practices",
|
| 148 |
+
"privacy": "Processes potentially sensitive business documents",
|
| 149 |
+
"transparency": "Cost calculations are explainable through tool usage"
|
| 150 |
+
},
|
| 151 |
+
"contact": {
|
| 152 |
+
"repository": "https://github.com/your-username/EndüstriChatbot",
|
| 153 |
+
"issues": "https://github.com/your-username/EndüstriChatbot/issues",
|
| 154 |
+
"documentation": "https://github.com/your-username/EndüstriChatbot/blob/main/README.md"
|
| 155 |
+
}
|
| 156 |
+
}
|
model_metadata.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"model_name": "Endüstri Chatbot",
|
| 3 |
+
"version": "1.0.0",
|
| 4 |
+
"description": "Industrial cost calculation AI chatbot with document processing capabilities",
|
| 5 |
+
"base_model": "HuggingFaceH4/zephyr-7b-beta",
|
| 6 |
+
"framework": "LangChain + FastAPI",
|
| 7 |
+
"language": [
|
| 8 |
+
"tr",
|
| 9 |
+
"en"
|
| 10 |
+
],
|
| 11 |
+
"license": "apache-2.0",
|
| 12 |
+
"tags": [
|
| 13 |
+
"industrial",
|
| 14 |
+
"cost-calculation",
|
| 15 |
+
"chatbot",
|
| 16 |
+
"langchain",
|
| 17 |
+
"fastapi",
|
| 18 |
+
"document-processing",
|
| 19 |
+
"turkish",
|
| 20 |
+
"english"
|
| 21 |
+
],
|
| 22 |
+
"tools": [
|
| 23 |
+
"labor_cost",
|
| 24 |
+
"material_cost",
|
| 25 |
+
"margin_calculation"
|
| 26 |
+
],
|
| 27 |
+
"features": [
|
| 28 |
+
"document_processing",
|
| 29 |
+
"cost_analysis",
|
| 30 |
+
"report_generation",
|
| 31 |
+
"turkish_language_support"
|
| 32 |
+
]
|
| 33 |
+
}
|
requirements.txt
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fastapi
|
| 2 |
+
uvicorn[standard]
|
| 3 |
+
sqlalchemy
|
| 4 |
+
psycopg2-binary
|
| 5 |
+
alembic
|
| 6 |
+
langchain
|
| 7 |
+
langchain_community
|
| 8 |
+
transformers>=4.34.0
|
| 9 |
+
huggingface_hub
|
| 10 |
+
python-multipart
|
| 11 |
+
pytesseract
|
| 12 |
+
pdf2image
|
| 13 |
+
pillow
|
| 14 |
+
python-docx
|
| 15 |
+
pydantic
|
| 16 |
+
python-dotenv
|
| 17 |
+
torch
|
| 18 |
+
numpy
|
| 19 |
+
accelerate
|
| 20 |
+
bitsandbytes
|
| 21 |
+
sentencepiece
|
| 22 |
+
protobuf
|
start_app.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
import subprocess
|
| 4 |
+
|
| 5 |
+
def check_environment():
|
| 6 |
+
"""Check if the environment is properly set up"""
|
| 7 |
+
print("Checking environment...")
|
| 8 |
+
|
| 9 |
+
# Check if .env file exists
|
| 10 |
+
if not os.path.exists(".env"):
|
| 11 |
+
print("Warning: .env file not found. Creating from .env.example...")
|
| 12 |
+
if os.path.exists(".env.example"):
|
| 13 |
+
with open(".env.example", "r") as example_file:
|
| 14 |
+
with open(".env", "w") as env_file:
|
| 15 |
+
env_file.write(example_file.read())
|
| 16 |
+
print(".env file created successfully.")
|
| 17 |
+
else:
|
| 18 |
+
print("Error: .env.example file not found. Please create a .env file manually.")
|
| 19 |
+
return False
|
| 20 |
+
|
| 21 |
+
# Check if required packages are installed
|
| 22 |
+
try:
|
| 23 |
+
import fastapi
|
| 24 |
+
import uvicorn
|
| 25 |
+
import langchain
|
| 26 |
+
import transformers
|
| 27 |
+
import torch
|
| 28 |
+
print("All required packages are installed.")
|
| 29 |
+
except ImportError as e:
|
| 30 |
+
print(f"Error: Missing required package: {e}")
|
| 31 |
+
print("Please run: pip install -r requirements.txt")
|
| 32 |
+
return False
|
| 33 |
+
|
| 34 |
+
return True
|
| 35 |
+
|
| 36 |
+
def start_application():
|
| 37 |
+
"""Start the FastAPI application"""
|
| 38 |
+
if not check_environment():
|
| 39 |
+
print("Environment check failed. Please fix the issues and try again.")
|
| 40 |
+
return
|
| 41 |
+
|
| 42 |
+
print("\nStarting the application...")
|
| 43 |
+
print("The application will be available at http://localhost:8000")
|
| 44 |
+
print("Press Ctrl+C to stop the application.")
|
| 45 |
+
|
| 46 |
+
# Start the application using uvicorn
|
| 47 |
+
try:
|
| 48 |
+
subprocess.run([sys.executable, "-m", "uvicorn", "app.main:app", "--reload", "--host", "0.0.0.0", "--port", "8000"])
|
| 49 |
+
except KeyboardInterrupt:
|
| 50 |
+
print("\nApplication stopped.")
|
| 51 |
+
|
| 52 |
+
if __name__ == "__main__":
|
| 53 |
+
start_application()
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": true,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"added_tokens_decoder": {
|
| 5 |
+
"0": {
|
| 6 |
+
"content": "<unk>",
|
| 7 |
+
"lstrip": false,
|
| 8 |
+
"normalized": false,
|
| 9 |
+
"rstrip": false,
|
| 10 |
+
"single_word": false,
|
| 11 |
+
"special": true
|
| 12 |
+
},
|
| 13 |
+
"1": {
|
| 14 |
+
"content": "<s>",
|
| 15 |
+
"lstrip": false,
|
| 16 |
+
"normalized": false,
|
| 17 |
+
"rstrip": false,
|
| 18 |
+
"single_word": false,
|
| 19 |
+
"special": true
|
| 20 |
+
},
|
| 21 |
+
"2": {
|
| 22 |
+
"content": "</s>",
|
| 23 |
+
"lstrip": false,
|
| 24 |
+
"normalized": false,
|
| 25 |
+
"rstrip": false,
|
| 26 |
+
"single_word": false,
|
| 27 |
+
"special": true
|
| 28 |
+
}
|
| 29 |
+
},
|
| 30 |
+
"additional_special_tokens": [],
|
| 31 |
+
"bos_token": "<s>",
|
| 32 |
+
"chat_template": "<|system|>\n{{ system_message }}</s>\n<|user|>\n{{ user_message }}</s>\n<|assistant|>\n{{ assistant_message }}</s>\n",
|
| 33 |
+
"clean_up_tokenization_spaces": false,
|
| 34 |
+
"eos_token": "</s>",
|
| 35 |
+
"legacy": true,
|
| 36 |
+
"model_max_length": 32768,
|
| 37 |
+
"pad_token": null,
|
| 38 |
+
"sp_model_kwargs": {},
|
| 39 |
+
"spaces_between_special_tokens": false,
|
| 40 |
+
"tokenizer_class": "LlamaTokenizer",
|
| 41 |
+
"unk_token": "<unk>",
|
| 42 |
+
"use_default_system_prompt": false,
|
| 43 |
+
"custom_tokens": {
|
| 44 |
+
"industrial_terms": [
|
| 45 |
+
"işçilik",
|
| 46 |
+
"malzeme",
|
| 47 |
+
"maliyet",
|
| 48 |
+
"marj",
|
| 49 |
+
"teklif",
|
| 50 |
+
"kaynakçı",
|
| 51 |
+
"teknisyen",
|
| 52 |
+
"mühendis",
|
| 53 |
+
"bakır",
|
| 54 |
+
"kablo",
|
| 55 |
+
"çelik",
|
| 56 |
+
"beton"
|
| 57 |
+
]
|
| 58 |
+
}
|
| 59 |
+
}
|