sudo apt update && sudo apt install python3 python3-pip
pip install torch transformers accelerate
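Before loading the model it is worth confirming that the GPU build of PyTorch actually sees a CUDA device; a quick check (assuming an NVIDIA GPU with drivers installed, otherwise it simply prints False and the model will run on CPU) is:

python3 -c "import torch; print(torch.cuda.is_available())"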
from transformers import AutoModelForCausalLM, AutoTokenizer
model_name = "deepseek-ai/deepseek-llm-7b"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
input_text = "Как автоматизировать отчётность в компании?"inputs = tokenizer(input_text, return_tensors="pt").to("cuda")outputs = model.generate(**inputs, max_length=200)print(tokenizer.decode(outputs[0], skip_special_tokens=True))