This example demonstrates a real automation workflow using Wikipedia, a site that’s stable and doesn’t have aggressive bot detection.
from tzafon import Computer

# Entry point to the tzafon API; create() provisions a remote browser session.
client = Computer()

# Context manager guarantees the browser session is torn down on exit.
with client.create(kind="browser") as computer:
    # Navigate to Wikipedia
    computer.navigate("https://wikipedia.org")
    computer.wait(2)

    # Take initial screenshot
    result = computer.screenshot()
    print(f"Homepage: {computer.get_screenshot_url(result)}")

    # Click search box (coordinates may vary by viewport)
    # Use screenshot to find the right coordinates for your setup
    computer.click(500, 250)
    computer.wait(0.5)

    # Type search query
    computer.type("Claude AI")
    computer.hotkey("enter")
    computer.wait(3)

    # Capture search results
    result = computer.screenshot()
    print(f"Results: {computer.get_screenshot_url(result)}")

    # Scroll down to see more content
    computer.scroll(dx=0, dy=500)
    computer.wait(1)

    # Final screenshot
    result = computer.screenshot()
    print(f"Scrolled: {computer.get_screenshot_url(result)}")
from tzafon import Computer

# Entry point to the tzafon API; create() provisions a remote browser session.
client = Computer()

# Context manager guarantees the browser session is torn down on exit.
with client.create(kind="browser") as computer:
    computer.navigate("https://wikipedia.org")
    computer.wait(2)

    # Get page HTML
    result = computer.html()
    html_content = computer.get_html_content(result)

    # Process the HTML; html_content may be falsy if extraction failed,
    # so guard before using it.
    if html_content:
        print(f"Page length: {len(html_content)} characters")
        # Use BeautifulSoup, lxml, or similar to parse