Update main.py
main.py (CHANGED)
@@ -11,6 +11,13 @@ from bs4 import BeautifulSoup
 import os
 import requests
 
+try: from pip._internal.operations import freeze
+except ImportError:  # pip < 10.0
+    from pip.operations import freeze
+
+pkgs = freeze.freeze()
+for pkg in pkgs: print(pkg)
+
 app = FastAPI()
 app.add_middleware(
     CORSMiddleware,
@@ -40,7 +47,7 @@ async def power_scrapper(url):
     page_content = []
     for link in links:
         href = await link.get_attribute('href')
-
+        page_url.append(href)
 
     # Extract all text content
     elements = await page.query_selector_all('body *')
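For reference, the package-listing block added in the first hunk also runs as a standalone script; a minimal sketch, assuming pip >= 10 is installed (where freeze lives under pip._internal), with the fallback import for older pip versions:

# Minimal sketch of the package-listing snippet from the first hunk.
try:
    from pip._internal.operations import freeze
except ImportError:  # pip < 10.0
    from pip.operations import freeze

# freeze.freeze() yields "name==version" strings for every installed package
for pkg in freeze.freeze():
    print(pkg)

Printing the environment at import time like this is mainly a debugging aid; the same listing could also be produced from the standard library (importlib.metadata) without reaching into pip internals.

The second hunk appends each link's href to page_url inside the scraping loop. A minimal sketch of that loop in isolation, assuming Playwright's async API (which the query_selector_all/get_attribute calls suggest), that links comes from a page.query_selector_all('a') call, and that page_url is a plain list as in power_scrapper; the collect_hrefs() wrapper is hypothetical:

import asyncio
from playwright.async_api import async_playwright

async def collect_hrefs(url: str) -> list[str]:
    page_url = []  # every href found on the page
    async with async_playwright() as p:
        browser = await p.chromium.launch()
        page = await browser.new_page()
        await page.goto(url)
        links = await page.query_selector_all('a')
        for link in links:
            href = await link.get_attribute('href')
            if href:  # get_attribute returns None when the attribute is absent
                page_url.append(href)  # the line the second hunk adds
        await browser.close()
    return page_url

# Example usage: asyncio.run(collect_hrefs("https://example.com"))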
|