import asyncio
from agents import pipeline, pb, logger
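
# Global cycle counter: incremented once per scheduling loop and compared
# against each site's 'per_hours' value to decide whether that site should
# be crawled in the current cycle.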
counter = 1


async def process_site(site, counter):
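    # Each record from the 'sites' collection is expected to provide at least
    # 'url' and 'per_hours' (crawl every N cycles); e.g. a record might look
    # like {'url': 'https://example.com', 'per_hours': 2, 'activated': True}
    # (hypothetical values, for illustration only).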
    if not site['per_hours'] or not site['url']:
        return
    if counter % site['per_hours'] == 0:
        logger.info(f"applying {site['url']}")
        await pipeline(site['url'].rstrip('/'))


async def schedule_pipeline(interval):
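    # Each cycle: fetch the activated sites via the pb client, process them
    # all concurrently, then sleep for `interval` seconds before the next run.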
    global counter
    while True:
        sites = pb.read('sites', filter='activated=True')
        logger.info(f'task execute loop {counter}')
        await asyncio.gather(*[process_site(site, counter) for site in sites])
        counter += 1
        logger.info(f'task execute loop finished, next run in {interval} seconds')
        await asyncio.sleep(interval)


async def main():
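    # Run the scheduler with a one-hour cycle; schedule_pipeline takes seconds.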
    interval_hours = 1
    interval_seconds = interval_hours * 60 * 60
    await schedule_pipeline(interval_seconds)


if __name__ == '__main__':
    asyncio.run(main())