Skip to content

Commit d73b63e

Browse files
committed
formatting
1 parent 666987a commit d73b63e

File tree

5 files changed

+164
-107
lines changed

5 files changed

+164
-107
lines changed

tutorial/build_index.py

Lines changed: 35 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,26 @@
import os

from dotenv import load_dotenv

# Load AZURE_* settings from .env before any client is constructed.
load_dotenv()

from azure.ai.ml import MLClient
from azure.identity import DefaultAzureCredential
from azure.ai.ml.entities import Index

from promptflow.rag.config import (
    LocalSource,
    AzureAISearchConfig,
    EmbeddingsModelConfig,
    ConnectionConfig,
)
from promptflow.rag import build_index

# Authenticate against the Azure AI Studio project named in the environment.
client = MLClient(
    DefaultAzureCredential(),
    os.getenv("AZURE_SUBSCRIPTION_ID"),
    os.getenv("AZURE_RESOURCE_GROUP"),
    os.getenv("AZUREAI_PROJECT_NAME"),
)
# NOTE: removed a duplicate `import os` that followed the client creation;
# `os` is already imported at the top of the file.
1425

1526
# append directory of the current script to data directory
@@ -18,43 +29,45 @@
1829

1930
# Check that the data directory exists and is non-empty before building
# the index; abort with a nonzero status otherwise.
if os.path.exists(data_directory):
    files = os.listdir(data_directory)  # List all files in the directory
    if files:
        print(
            f"Data directory '{data_directory}' exists and contains {len(files)} files."
        )
    else:
        print(f"Data directory '{data_directory}' exists but is empty.")
        # exit() is a site-module convenience for the REPL and exits 0;
        # a failed precondition should terminate the script with a nonzero code.
        raise SystemExit(1)
else:
    print(f"Data directory '{data_directory}' does not exist.")
    raise SystemExit(1)
3043

index_name = "tutorial-index"  # your desired index name


def _project_connection(connection_name: str) -> ConnectionConfig:
    # Both the embeddings model and the search service connect through the
    # same AI Studio project; only the connection name differs, so build the
    # shared fields in one place.
    return ConnectionConfig(
        subscription_id=client.subscription_id,
        resource_group_name=client.resource_group_name,
        workspace_name=client.workspace_name,
        connection_name=connection_name,
    )


index_path = build_index(
    name=index_name,  # name of your index
    vector_store="azure_ai_search",  # the type of vector store - in this case it is Azure AI Search. Users can also use "azure_cognitive search"
    embeddings_model_config=EmbeddingsModelConfig(
        model_name=os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT"),
        deployment_name=os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT"),
        connection_config=_project_connection(
            os.getenv("AZURE_OPENAI_CONNECTION_NAME")
        ),
    ),
    input_source=LocalSource(input_data=data_directory),  # the location of your files
    index_config=AzureAISearchConfig(
        ai_search_index_name=index_name,  # the name of the index store inside the azure ai search service
        ai_search_connection_config=_project_connection(
            os.getenv("AZURE_SEARCH_CONNECTION_NAME")
        ),
    ),
    tokens_per_chunk=800,  # Optional field - Maximum number of tokens per chunk
    token_overlap_across_chunks=0,  # Optional field - Number of tokens to overlap between chunks
)

# register the index so that it shows up in the cloud project
client.indexes.create_or_update(Index(name=index_name, path=index_path))

tutorial/copilot_flow/copilot.py

Lines changed: 36 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import os
22
from dotenv import load_dotenv
3+
34
load_dotenv()
45

56
from promptflow.core import Prompty, AzureOpenAIModelConfiguration
def get_documents(search_query: str, num_docs=3):
    """Retrieve the ``num_docs`` most relevant documents for ``search_query``.

    The query text is embedded with the Azure OpenAI embedding deployment and
    used for a k-nearest-neighbour vector search against the AI Search index;
    the hits are concatenated into one context string for the chat prompt.
    """
    search_client = SearchClient(
        endpoint=os.getenv("AZURE_SEARCH_ENDPOINT"),
        credential=DefaultAzureCredential(),
        index_name=index_name,
    )

    aoai_client = AzureOpenAI(
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
        azure_ad_token_provider=token_provider,
        api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
    )

    # Embed the user's question so it can be compared against the index vectors.
    embedding_response = aoai_client.embeddings.create(
        input=search_query, model=os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT")
    )
    query_vector = embedding_response.data[0].embedding

    # k-NN search over the "contentVector" field of the index.
    vector_query = VectorizedQuery(
        vector=query_vector, k_nearest_neighbors=num_docs, fields="contentVector"
    )
    results = trace(search_client.search)(
        search_text="", vector_queries=[vector_query], select=["id", "content"]
    )

    # Stitch every hit into a single annotated context block.
    snippets = [
        f"\n>>> From: {result['id']}\n{result['content']}" for result in results
    ]
    return "".join(snippets)
55+
56+
5157
# <get_documents>
5258

5359
from promptflow.core import Prompty, AzureOpenAIModelConfiguration
5460

5561
from pathlib import Path
5662
from typing import TypedDict
5763

64+
5865
class ChatResponse(TypedDict):
    """Shape of the payload returned by ``get_chat_response``."""

    # Retrieved document context. NOTE(review): ``get_chat_response`` visibly
    # fills this with the string produced by ``get_documents`` — confirm
    # whether ``dict`` is the intended annotation here.
    context: dict
    # The chat model's reply text for the user's input.
    reply: str
6168

69+
6270
def get_chat_response(chat_input: str, chat_history: list = None) -> ChatResponse:
    """Answer ``chat_input`` with RAG over the tutorial search index.

    When ``chat_history`` is non-empty, the latest query intent is first
    extracted with ``queryIntent.prompty`` so follow-up questions retrieve
    the right documents.

    Returns a dict with ``reply`` (the model's answer) and ``context``
    (the retrieved document text).
    """
    # Default is None instead of [] to avoid the shared-mutable-default
    # pitfall; callers that omitted the argument see identical behavior.
    if chat_history is None:
        chat_history = []

    model_config = AzureOpenAIModelConfiguration(
        azure_deployment=os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT"),
        api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    )

    search_query = chat_input

    # Only extract intent if there is chat_history
    if len(chat_history) > 0:
        # extract current query intent given chat_history
        prompty_dir = Path(__file__).parent.absolute().as_posix()
        intent_prompty = Prompty.load(
            f"{prompty_dir}/queryIntent.prompty",  # pass absolute file path to prompty
            model={
                "configuration": model_config,
                "parameters": {
                    "max_tokens": 256,
                },
            },
        )
        search_query = intent_prompty(query=chat_input, chat_history=chat_history)

    # retrieve relevant documents and context given chat_history and current user query (chat_input)
    documents = get_documents(search_query, 3)

    # send query + document context to chat completion for a response
    path_to_prompty = f"{Path(__file__).parent.absolute().as_posix()}/chat.prompty"
    chat_prompty = Prompty.load(
        path_to_prompty,
        model={
            "configuration": model_config,
            "parameters": {"max_tokens": 256, "temperature": 0.2},
        },
    )
    result = chat_prompty(
        chat_history=chat_history, chat_input=chat_input, documents=documents
    )

    # NOTE(review): ``documents`` is the string from get_documents, while the
    # ChatResponse annotation declares ``context: dict`` — confirm intent.
    return dict(reply=result, context=documents)

tutorial/deploy.py

Lines changed: 48 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,34 @@
11
# <deploy>
import os
from dotenv import load_dotenv

# Load AZURE_* settings from .env before constructing any client.
load_dotenv()

from azure.ai.ml import MLClient
from azure.identity import DefaultAzureCredential
from azure.ai.ml.entities import (
    ManagedOnlineEndpoint,
    ManagedOnlineDeployment,
    Model,
    Environment,
    BuildContext,
)

# Authenticate against the Azure AI Studio project named in the environment.
client = MLClient(
    DefaultAzureCredential(),
    os.getenv("AZURE_SUBSCRIPTION_ID"),
    os.getenv("AZURE_RESOURCE_GROUP"),
    os.getenv("AZUREAI_PROJECT_NAME"),
)
endpoint_name = "tutorial-endpoint"
deployment_name = "tutorial-deployment"

# Managed online endpoint that will host the copilot flow.
endpoint = ManagedOnlineEndpoint(
    name=endpoint_name,
    properties={
        "enforce_access_to_default_secret_stores": "enabled"  # for secret injection support
    },
    auth_mode="aad_token",  # using aad auth instead of key-based auth
)
2133

2234
# Get the directory of the current script
@@ -29,13 +41,13 @@
2941
endpoint_name=endpoint_name,
3042
model=Model(
3143
name="copilot_flow_model",
32-
path=copilot_path, # path to promptflow folder
33-
properties=[ # this enables the chat interface in the endpoint test tab
44+
path=copilot_path, # path to promptflow folder
45+
properties=[ # this enables the chat interface in the endpoint test tab
3446
["azureml.promptflow.source_flow_id", "basic-chat"],
3547
["azureml.promptflow.mode", "chat"],
3648
["azureml.promptflow.chat_input", "chat_input"],
37-
["azureml.promptflow.chat_output", "reply"]
38-
]
49+
["azureml.promptflow.chat_output", "reply"],
50+
],
3951
),
4052
environment=Environment(
4153
build=BuildContext(
@@ -50,7 +62,7 @@
5062
"path": "/health",
5163
"port": 8080,
5264
},
53-
"scoring_route":{
65+
"scoring_route": {
5466
"path": "/score",
5567
"port": 8080,
5668
},
@@ -60,41 +72,52 @@
6072
instance_count=1,
6173
environment_variables={
6274
"PRT_CONFIG_OVERRIDE": f"deployment.subscription_id={client.subscription_id},deployment.resource_group={client.resource_group_name},deployment.workspace_name={client.workspace_name},deployment.endpoint_name={endpoint_name},deployment.deployment_name={deployment_name}",
63-
'AZURE_OPENAI_ENDPOINT': os.getenv('AZURE_OPENAI_ENDPOINT'),
64-
'AZURE_SEARCH_ENDPOINT': os.getenv('AZURE_SEARCH_ENDPOINT'),
65-
'AZURE_OPENAI_API_VERSION': os.getenv('AZURE_OPENAI_API_VERSION'),
66-
'AZURE_OPENAI_CHAT_DEPLOYMENT': os.getenv('AZURE_OPENAI_CHAT_DEPLOYMENT'),
67-
'AZURE_OPENAI_EVALUATION_DEPLOYMENT': os.getenv('AZURE_OPENAI_EVALUATION_DEPLOYMENT'),
68-
'AZURE_OPENAI_EMBEDDING_DEPLOYMENT': os.getenv('AZURE_OPENAI_EMBEDDING_DEPLOYMENT'),
69-
'AZUREAI_SEARCH_INDEX_NAME': os.getenv('AZUREAI_SEARCH_INDEX_NAME')
70-
}
75+
"AZURE_OPENAI_ENDPOINT": os.getenv("AZURE_OPENAI_ENDPOINT"),
76+
"AZURE_SEARCH_ENDPOINT": os.getenv("AZURE_SEARCH_ENDPOINT"),
77+
"AZURE_OPENAI_API_VERSION": os.getenv("AZURE_OPENAI_API_VERSION"),
78+
"AZURE_OPENAI_CHAT_DEPLOYMENT": os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT"),
79+
"AZURE_OPENAI_EVALUATION_DEPLOYMENT": os.getenv(
80+
"AZURE_OPENAI_EVALUATION_DEPLOYMENT"
81+
),
82+
"AZURE_OPENAI_EMBEDDING_DEPLOYMENT": os.getenv(
83+
"AZURE_OPENAI_EMBEDDING_DEPLOYMENT"
84+
),
85+
"AZUREAI_SEARCH_INDEX_NAME": os.getenv("AZUREAI_SEARCH_INDEX_NAME"),
86+
},
7187
)
7288

7389
# Provisioning order matters: the endpoint must exist before the deployment,
# and traffic can only be routed once the deployment is created.
# 1. create endpoint
created_endpoint = client.begin_create_or_update(
    endpoint
).result()  # result() means we wait on this to complete

# 2. create deployment
created_deployment = client.begin_create_or_update(deployment).result()

# 3. update endpoint traffic for the deployment
endpoint.traffic = {deployment_name: 100}  # 100% of traffic
client.begin_create_or_update(endpoint).result()
# </deploy>
83101

84102
# <report>
85-
def get_ai_studio_url_for_deploy(
    client: MLClient, endpoint_name: str, deployment_name: str
) -> str:
    """Return the Azure AI Studio URL of this deployment's detail page.

    The link is built from the client's subscription, resource group and
    workspace identifiers, so it points into the same project the script
    deployed to. ``deployment_name`` gained the ``str`` annotation its
    sibling parameter already had; no behavior change.
    """
    studio_base_url = "https://ai.azure.com"
    deployment_url = f"{studio_base_url}/projectdeployments/realtime/{endpoint_name}/{deployment_name}/detail?wsid=/subscriptions/{client.subscription_id}/resourceGroups/{client.resource_group_name}/providers/Microsoft.MachineLearningServices/workspaces/{client.workspace_name}&deploymentName={deployment_name}"

    return deployment_url
92110

111+
93112
# Summarize the deployment and print a direct link to the endpoint's
# test page in Azure AI Studio.
print("\n ~~~Deployment details~~~")
print(f"Your online endpoint name is: {endpoint_name}")
print(f"Your deployment name is: {deployment_name}")

print("\n ~~~Test in the Azure AI Studio~~~")
print("\n Follow this link to your deployment in the Azure AI Studio:")
print(
    get_ai_studio_url_for_deploy(
        client=client, endpoint_name=endpoint_name, deployment_name=deployment_name
    )
)
100123
# </report>

0 commit comments

Comments
 (0)