Direkt zum Inhalt

Aktualisiert am 19. Nov. 2024 · 12 Min. Lesezeit

KI-Anwendungen entwickeln

Lerne, wie man KI-Anwendungen mit der OpenAI API erstellt.
Start Upskilling for Free

 

!pip install beautifulsoup4 requests ollama

ollama run llama3.1 

import time
import requests
from bs4 import BeautifulSoup
import ollama

def fetch_article_content(url, retries=3, content_limit=None):
    """Download a web page and return its visible text content.

    Transient network failures are retried with a short backoff.

    Args:
        url: Address of the page to download.
        retries: Maximum number of attempts before re-raising the last error.
        content_limit: If given, truncate the returned text to this many
            characters.

    Returns:
        The page text with <script>/<style> elements removed and whitespace
        collapsed, one chunk per line.

    Raises:
        requests.exceptions.RequestException: if all attempts fail.
    """
    attempt = 0
    while attempt < retries:
        try:
            response = requests.get(url, timeout=30)  # Add timeout to prevent hanging
            response.raise_for_status()  # Raise an error for bad responses (4xx, 5xx)
            soup = BeautifulSoup(response.content, 'html.parser')
            # Remove elements that carry no visible text.
            for script in soup(["script", "style"]):
                script.decompose()
            text = soup.get_text()
            # Collapse whitespace: strip each line, split runs of double
            # spaces, and keep only non-empty chunks.
            lines = (line.strip() for line in text.splitlines())
            chunks = (phrase.strip() for line in lines for phrase in line.split("  "))
            text = '\n'.join(chunk for chunk in chunks if chunk)
            if content_limit:
                text = text[:content_limit]
            return text
        # BUGFIX: this `except` was mis-indented in the original (a
        # SyntaxError); it must align with the `try` inside the loop.
        # Note: ChunkedEncodingError is already a RequestException subclass,
        # so listing it is redundant but harmless.
        except (requests.exceptions.RequestException, requests.exceptions.ChunkedEncodingError) as e:
            attempt += 1
            print(f"Attempt {attempt} failed: {e}")
            if attempt >= retries:
                raise  # out of retries: surface the last error to the caller
            time.sleep(2)  # brief backoff before retrying

# Pull the intro chapter of the Deep Learning book (capped at 200k chars)
# and show a short preview of what was fetched.
book_url = "https://www.deeplearningbook.org/contents/intro.html"
book_content = fetch_article_content(book_url, content_limit=200000)
print(f"Fetched {len(book_content)} characters from the book.")
print("First 500 characters:")
preview = book_content[:500]
print(preview)

 

 

 

def make_non_cached_inference(prompt):
    """Run a single Ollama generation for *prompt* and time it.

    Returns:
        A tuple ``(generated_text, elapsed_seconds)``.
    """
    started = time.time()
    result = ollama.generate(model="llama3", prompt=prompt)
    elapsed = time.time() - started
    # Dump the raw response so its structure can be inspected.
    print(f"Full response: {result}")
    # Pull the generated text out of the response dict.
    summary = result.get('response', 'No summary found in response')
    return summary, elapsed

def make_cached_inference(prompt, cache):
    """Generate a summary via Ollama, memoizing results by prompt.

    A cache hit returns immediately with a reported latency of 0; a miss
    calls the model and stores the result in *cache*.

    Returns:
        A tuple ``(generated_text, elapsed_seconds)``.
    """
    if prompt in cache:
        print("Using cached result...")
        return cache[prompt], 0  # Return cached response with 0 latency
    started = time.time()
    result = ollama.generate(model="llama3", prompt=prompt)
    elapsed = time.time() - started
    # Dump the raw response so its structure can be inspected.
    print(f"Full response (cached call): {result}")
    summary = result.get('response', 'No summary found in response')
    cache[prompt] = summary  # memoize for subsequent calls
    return summary, elapsed

# Embed the first 10k characters of the fetched book in the prompt.
prompt = f"<book>{book_content[:10000]}</book>\nPlease provide a summary of this book."

 

# NOTE(review): the inference helpers use model="llama3", but the setup step
# runs `ollama run llama3.1` — confirm which model tag is intended.
non_cached_response, non_cached_time = make_non_cached_inference(prompt)
print(f"Non-cached inference time: {non_cached_time:.2f} seconds")
print(f"Non-cached summary: {non_cached_response}")
cache = {}

# Cached inference to generate a summary
# (the cache starts empty, so this first call is a miss and still hits the
# model; only subsequent calls with the same prompt would return instantly)
cached_response, cached_time = make_cached_inference(prompt, cache)
print(f"Cached inference time: {cached_time:.2f} seconds")
print(f"Cached summary: {cached_response}")

Llama2

Llama3

Gemma2

Phi3

 

Verdiene eine Top-KI-Zertifizierung

Zeige, dass du KI effektiv und verantwortungsbewusst einsetzen kannst.

Photo of Aashi Dutt
Author
Aashi Dutt
LinkedIn
Twitter

Ich bin ein Google Developers Expert in ML (Gen AI), ein Kaggle 3x Expert und ein Women Techmakers Ambassador mit mehr als 3 Jahren Erfahrung im Tech-Bereich. Ich habe 2020 ein Startup im Bereich Gesundheitstechnologie mitbegründet und mache einen Master in Informatik an der Georgia Tech, der sich auf maschinelles Lernen spezialisiert.

Themen

Lernpfad

Developing AI Applications

23 hrs
Learn to create AI-powered applications with the latest AI developer tools, including the OpenAI API, Hugging Face, and LangChain.
Siehe DetailsRight Arrow
Kurs Starten
Mehr anzeigenRight Arrow