Nimesh Kumar
from prefect.client import get_client
from datetime import datetime
import asyncio
import random
import uuid

# NOTE(review): the "sqlite3.OperationalError: no such table: deployment" error
# happens when PREFECT_API_URL is not set in the environment of the process
# running THIS script -- get_client() then falls back to an ephemeral local
# server backed by SQLite instead of talking to the Orion API in docker.
# Export PREFECT_API_URL=http://<orion-host>:4200/api before running.

now = datetime.now()
current_time = now.strftime("%H:%M:%S")
print("Current Time =", current_time)


async def trigger_flow_runs():
    """Pick a deployment id at random and create a flow run for it via the Prefect API.

    Side effects: prints diagnostics and creates one flow run on the server
    that PREFECT_API_URL points at.
    """
    # get_client() returns an async context manager; `async with` ensures the
    # underlying httpx client is opened before use and closed afterwards.
    async with get_client() as client:
        print(client, ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
        d_id_list = ["ddf0ae5c-7ed4-48b2-b417-535d767933f2"]
        choosen_queue = random.choice(d_id_list)
        print("Deployment ID ", choosen_queue)
        param = {"my_param": str(uuid.uuid4()), "file_path": "root/flows/tmp_folder"}
        print(param)
        # Fixed two bugs in the original call:
        #   - removed `db: session` (invalid syntax inside a call; the client
        #     talks to the API over HTTP and takes no db session argument)
        #   - `uuid.uuid4` -> `uuid.uuid4()` (the function object itself was
        #     being stringified, not a fresh UUID)
        t = await client.create_flow_run_from_deployment(
            deployment_id=choosen_queue,
            parameters=param,
            name=str(uuid.uuid4()),
        )
        print(".....................................................................................................................................................................................")
        print(t)
        print(".....................................................................................................................................................................................")


if __name__ == "__main__":
    asyncio.run(trigger_flow_runs())
But it is giving me the error below; I don't know why it is trying to use SQLite3.
sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such table: deployment
[SQL: SELECT deployment.id AS deployment_id, deployment.created AS deployment_created, deployment.updated AS deployment_updated, deployment.name AS deployment_name, deployment.version AS deployment_version, deployment.description AS deployment_description, deployment.manifest_path AS deployment_manifest_path, deployment.work_queue_name AS deployment_work_queue_name, deployment.infra_overrides AS deployment_infra_overrides, deployment.path AS deployment_path, deployment.entrypoint AS deployment_entrypoint, deployment.schedule AS deployment_schedule, deployment.is_schedule_active AS deployment_is_schedule_active, deployment.tags AS deployment_tags, deployment.parameters AS deployment_parameters, deployment.parameter_openapi_schema AS deployment_parameter_openapi_schema, deployment.created_by AS deployment_created_by, deployment.updated_by AS deployment_updated_by, deployment.flow_id AS deployment_flow_id, deployment.work_pool_queue_id AS deployment_work_pool_queue_id, deployment.infrastructure_document_id AS deployment_infrastructure_document_id, deployment.storage_document_id AS deployment_storage_document_id, work_pool_1.id AS work_pool_1_id, work_pool_1.created AS work_pool_1_created, work_pool_1.updated AS work_pool_1_updated, work_pool_1.name AS work_pool_1_name, work_pool_1.description AS work_pool_1_description, work_pool_1.type AS work_pool_1_type, work_pool_1.base_job_template AS work_pool_1_base_job_template, work_pool_1.is_paused AS work_pool_1_is_paused, work_pool_1.default_queue_id AS work_pool_1_default_queue_id, work_pool_1.concurrency_limit AS work_pool_1_concurrency_limit, work_pool_queue_1.id AS work_pool_queue_1_id, work_pool_queue_1.created AS work_pool_queue_1_created, work_pool_queue_1.updated AS work_pool_queue_1_updated, work_pool_queue_1.name AS work_pool_queue_1_name, work_pool_queue_1.description AS work_pool_queue_1_description, work_pool_queue_1.is_paused AS work_pool_queue_1_is_paused, work_pool_queue_1.concurrency_limit AS 
work_pool_queue_1_concurrency_limit, work_pool_queue_1.priority AS work_pool_queue_1_priority, work_pool_queue_1.work_pool_id AS work_pool_queue_1_work_pool_id
FROM deployment LEFT OUTER JOIN work_pool_queue AS work_pool_queue_1 ON work_pool_queue_1.id = deployment.work_pool_queue_id LEFT OUTER JOIN work_pool AS work_pool_1 ON work_pool_1.id = work_pool_queue_1.work_pool_id
WHERE deployment.id = :pk_1]
[parameters: {'pk_1': 'ddf0ae5c-7ed4-48b2-b417-535d767933f2'}]
(Background on this error at: <https://sqlalche.me/e/14/e3q8>)
I am using the docker-compose file below.
version: "3.3"
services:
  ### Prefect Database
  database:
    image: postgres:15.1-alpine
    restart: always
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_DB=orion
    expose:
      - 5432
    volumes:
      - db:/var/lib/postgresql/data

  ### MinIO for flow storage
  minio:
    image: minio/minio:latest
    entrypoint: ["minio", "server", "--address", "0.0.0.0:9000", "--console-address", "0.0.0.0:9002", "/data"]
    volumes:
      - "minio:/data"
    ports:
      - 9000:9000
      - 9002:9002

  ### Prefect Orion API
  orion:
    image: prefecthq/prefect:2.7.7-python3.11
    restart: always
    entrypoint: ["prefect", "orion", "start"]
    working_dir: "/root/flows"
    # Fixed: the service originally declared `volumes:` twice (a duplicate
    # mapping key is invalid YAML, and the second list would shadow the
    # first, dropping the ~/.prefect mount). Merged into one list.
    volumes:
      - prefect:/root/.prefect
      - "./flows:/root/flows"
    environment:
      # Fixed: removed the chat-client `<...>` wrapping that had been pasted
      # around these URL values -- the brackets would otherwise be passed
      # literally into the environment variables.
      - PREFECT_API_URL=http://172.16.12.17:4200/api
      - PREFECT_ORION_API_HOST=0.0.0.0
      - PREFECT_ORION_DATABASE_CONNECTION_URL=postgresql+asyncpg://postgres:postgres@database:5432/orion
    ports:
      - 4200:4200
    depends_on:
      - database

  ## Prefect Agent
  agent:
    image: prefecthq/prefect:2.7.7-python3.11
    restart: always
    entrypoint: ["prefect", "agent", "start", "-q", "rbfracture", "testing"]
    environment:
      - PREFECT_API_URL=http://172.16.12.17:4200/api

  # ### Prefect CLI
  # cli:
  #   image: prefecthq/prefect:2.7.7-python3.11
  #   entrypoint: "bash"
  #   working_dir: "/root/flows"
  #   volumes:
  #     - "./flows:/root/flows"
  #   environment:
  #     - PREFECT_API_URL=http://orion:4200/api

volumes:
  prefect:
  db:
  minio:

networks:
  default:
    external:
      name: carpl_docker_backend
Can anyone please tell me how I can trigger it?