from django.http import HttpResponse
from django.views.decorators.http import require_GET
from django.views.decorators.cache import cache_control
import os

@require_GET
@cache_control(max_age=86400, public=True)  # Cache for 24 hours
def robots_txt(request):
    """Serve the project's static robots.txt file as ``text/plain``.

    Reads ``<package_parent>/static/robots.txt`` (two directory levels up
    from this module) on each request and returns its contents verbatim.

    Args:
        request: The incoming ``HttpRequest`` (GET only, enforced by
            ``@require_GET``).

    Returns:
        HttpResponse: the file contents with ``Content-Type: text/plain``.

    Raises:
        FileNotFoundError: if the static robots.txt is missing — surfaces
        as a 500, which is preferable to silently serving an empty policy.
    """
    # Resolve <package_parent>/static/robots.txt relative to this module so
    # the view works regardless of the process's current working directory.
    robots_file_path = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'static', 'robots.txt'
    )

    # Read explicitly as UTF-8 so the served bytes do not depend on the
    # server's locale-default encoding.
    with open(robots_file_path, 'r', encoding='utf-8') as f:
        content = f.read()

    response = HttpResponse(content, content_type='text/plain')

    # Allow crawlers to index/follow this URL itself.
    response['X-Robots-Tag'] = 'all'

    # NOTE: Cache-Control is handled entirely by the @cache_control
    # decorator above ("public, max-age=86400"); the previous manual
    # header assignment duplicated it and risked the two drifting apart.
    return response
