forked from langfuse/langfuse
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.dev.example
More file actions
151 lines (126 loc) · 5.27 KB
/
.env.dev.example
File metadata and controls
151 lines (126 loc) · 5.27 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
# When adding additional environment variables, the schema in "/src/env.mjs"
# should be updated accordingly.
# ============================================================================
# DOCKER CONFIGURATION
# ============================================================================
# These variables configure docker-compose port mappings and container names
# To run multiple instances, copy this file to .env and customize these values
# Host Ports
# POSTGRES_HOST_PORT=5432
# REDIS_HOST_PORT=6379
# CLICKHOUSE_HTTP_PORT=8123
# CLICKHOUSE_NATIVE_PORT=9000
# MINIO_API_PORT=9090
# MINIO_CONSOLE_PORT=9091
# WEB_HOST_PORT=3000
# WORKER_HOST_PORT=3030
# Container Names
# POSTGRES_CONTAINER_NAME=langfuse-postgres
# CLICKHOUSE_CONTAINER_NAME=langfuse-clickhouse
# REDIS_CONTAINER_NAME=langfuse-redis
# MINIO_CONTAINER_NAME=langfuse-minio
# WEB_CONTAINER_NAME=langfuse-web
# WORKER_CONTAINER_NAME=langfuse-worker
# Volumes
# POSTGRES_VOLUME_NAME=langfuse_postgres_data
# CLICKHOUSE_DATA_VOLUME_NAME=langfuse_clickhouse_data
# CLICKHOUSE_LOGS_VOLUME_NAME=langfuse_clickhouse_logs
# MINIO_VOLUME_NAME=langfuse_minio_data
# Network
# DOCKER_NETWORK_NAME=langfuse-network
# ============================================================================
# APPLICATION CONFIGURATION
# ============================================================================
# Prisma
# https://www.prisma.io/docs/reference/database-reference/connection-urls#env
DIRECT_URL="postgresql://postgres:postgres@localhost:5432/postgres"
DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres"
# Clickhouse
CLICKHOUSE_MIGRATION_URL="clickhouse://localhost:9000"
CLICKHOUSE_URL="http://localhost:8123"
CLICKHOUSE_USER="clickhouse"
CLICKHOUSE_PASSWORD="clickhouse"
CLICKHOUSE_CLUSTER_ENABLED="false"
# Next Auth
# You can generate a new secret on the command line with:
# openssl rand -base64 32
# https://next-auth.js.org/configuration/options#secret
# NEXTAUTH_SECRET=""
NEXTAUTH_URL="http://localhost:3000"
NEXTAUTH_SECRET="secret"
# Langfuse Cloud Environment
NEXT_PUBLIC_LANGFUSE_CLOUD_REGION="DEV"
# Langfuse experimental features
LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES="true"
# Salt for API key hashing
SALT="salt"
# Email
EMAIL_FROM_ADDRESS="" # Defines the email address to use as the from address.
SMTP_CONNECTION_URL="" # Defines the connection url for smtp server.
CLOUD_CRM_EMAIL="" # Optional BCC address for usage threshold emails (e.g., for CRM integration like HubSpot)
# S3 Batch Exports
LANGFUSE_S3_BATCH_EXPORT_ENABLED=true
LANGFUSE_S3_BATCH_EXPORT_BUCKET=langfuse
LANGFUSE_S3_BATCH_EXPORT_ACCESS_KEY_ID=minio
LANGFUSE_S3_BATCH_EXPORT_SECRET_ACCESS_KEY=miniosecret
LANGFUSE_S3_BATCH_EXPORT_REGION=us-east-1
LANGFUSE_S3_BATCH_EXPORT_ENDPOINT=http://localhost:9090
## Necessary for MinIO compatibility
LANGFUSE_S3_BATCH_EXPORT_FORCE_PATH_STYLE=true
LANGFUSE_S3_BATCH_EXPORT_PREFIX=exports/
# S3 Media Upload LOCAL
LANGFUSE_S3_MEDIA_UPLOAD_BUCKET=langfuse
LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID=minio
LANGFUSE_S3_MEDIA_UPLOAD_SECRET_ACCESS_KEY=miniosecret
LANGFUSE_S3_MEDIA_UPLOAD_REGION=us-east-1
LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT=http://localhost:9090
## Necessary for MinIO compatibility
LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE=true
LANGFUSE_S3_MEDIA_UPLOAD_PREFIX=media/
# S3 Event Bucket Upload
## Set to true to test uploading all events to S3
LANGFUSE_S3_EVENT_UPLOAD_BUCKET=langfuse
LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID=minio
LANGFUSE_S3_EVENT_UPLOAD_SECRET_ACCESS_KEY=miniosecret
LANGFUSE_S3_EVENT_UPLOAD_REGION=us-east-1
LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT=http://localhost:9090
## Necessary for MinIO compatibility
LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE=true
LANGFUSE_S3_EVENT_UPLOAD_PREFIX=events/
# Set during docker build of application
# Used to disable environment verification at build time
# DOCKER_BUILD=1
REDIS_HOST="127.0.0.1"
REDIS_PORT=6379
REDIS_AUTH="myredissecret"
# REDIS_SENTINEL_ENABLED="false"
# REDIS_SENTINEL_NODES="sentinel1:26379,sentinel2:26379"
# REDIS_SENTINEL_MASTER_NAME="mymaster"
# REDIS_SENTINEL_USERNAME=""
# REDIS_SENTINEL_PASSWORD=""
# Dev-only placeholder key; generate a real one with: openssl rand -hex 32
ENCRYPTION_KEY=0000000000000000000000000000000000000000000000000000000000000000
# Speeds up local development by not executing init scripts on server startup.
NEXT_PUBLIC_LANGFUSE_RUN_NEXT_INIT="false"
# For SDK integration tests to pass, decrease the ingestion queue delay by uncommenting the env vars:
# LANGFUSE_INGESTION_QUEUE_DELAY_MS=10
# LANGFUSE_INGESTION_CLICKHOUSE_WRITE_INTERVAL_MS=10
# Slack credentials for development
SLACK_CLIENT_ID=your_slack_client_id
SLACK_CLIENT_SECRET=your_slack_client_secret
SLACK_STATE_SECRET=your_slack_state_secret
# Langfuse AI instance for tracing, prompts
LANGFUSE_AI_FEATURES_PUBLIC_KEY="pk-lf-1234567890"
LANGFUSE_AI_FEATURES_SECRET_KEY="sk-lf-1234567890"
LANGFUSE_AI_FEATURES_HOST="http://localhost:3000"
LANGFUSE_AI_FEATURES_PROJECT_ID=7a88fb47-b4e2-43b8-a06c-a5ce950dc53a
# Langfuse AI Bedrock credentials
AWS_ACCESS_KEY_ID="A123456789"
AWS_SECRET_ACCESS_KEY="SAK123456789"
LANGFUSE_AWS_BEDROCK_REGION="eu-west-1"
LANGFUSE_AWS_BEDROCK_MODEL="eu.anthropic.claude-3-haiku-20240307-v1:0"
# Events table migration
LANGFUSE_ENABLE_EVENTS_TABLE_OBSERVATIONS=true
LANGFUSE_ENABLE_EVENTS_TABLE_FLAGS=true
LANGFUSE_ENABLE_EVENTS_TABLE_V2_APIS=true
LANGFUSE_EXPERIMENT_INSERT_INTO_EVENTS_TABLE=true