Merge branch 'microsoft:main' into update-modelid
SandraAhlgrimm committed Jan 5, 2024
2 parents ac73cb8 + 833a2d7 commit 11369bf
Showing 178 changed files with 1,253 additions and 3,155 deletions.
3 changes: 3 additions & 0 deletions python/pyproject.toml
@@ -85,6 +85,9 @@ profile = "ruff"
select = ["E", "F", "I"]
line-length = 120

[tool.black]
line-length = 120

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
4 changes: 1 addition & 3 deletions python/samples/kernel-syntax-examples/action_planner.py
@@ -12,9 +12,7 @@ async def main():
kernel = sk.Kernel()
api_key, org_id = sk.openai_settings_from_dot_env()

kernel.add_chat_service(
"chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)
)
kernel.add_chat_service("chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id))
kernel.import_skill(MathSkill(), "math")
kernel.import_skill(FileIOSkill(), "fileIO")
kernel.import_skill(TimeSkill(), "time")
16 changes: 4 additions & 12 deletions python/samples/kernel-syntax-examples/azure_chat_gpt_api.py
@@ -24,9 +24,7 @@

kernel = sk.Kernel()

chat_service = sk_oai.AzureChatCompletion(
**azure_openai_settings_from_dot_env_as_dict(include_api_version=True)
)
chat_service = sk_oai.AzureChatCompletion(**azure_openai_settings_from_dot_env_as_dict(include_api_version=True))
kernel.add_chat_service("chat-gpt", chat_service)

## there are three ways to create the request settings in code: # noqa: E266
@@ -41,9 +39,7 @@
## The second method is useful when you are using a single service, and you want to have type checking on the request settings or when you are using multiple instances of the same type of service, for instance gpt-35-turbo and gpt-4, both in openai and both for chat. # noqa: E501 E266

## 3. create the request settings from the kernel based on the registered service class: # noqa: E266
req_settings = kernel.get_request_settings_from_service(
ChatCompletionClientBase, "chat-gpt"
)
req_settings = kernel.get_request_settings_from_service(ChatCompletionClientBase, "chat-gpt")
req_settings.max_tokens = 2000
req_settings.temperature = 0.7
req_settings.top_p = 0.8
@@ -52,15 +48,11 @@

prompt_config = sk.PromptTemplateConfig(completion=req_settings)

prompt_template = sk.ChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)

prompt_template.add_system_message(system_message)
prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am Mosscap, a chat bot. I'm trying to figure out what people need."
)
prompt_template.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
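Note on the hunk above: the sample's comments list three ways to build request settings, but the diff only shows the third (retrieving them from the kernel). A minimal sketch of the second, typed approach, assuming the settings class is exposed as sk_oai.AzureChatRequestSettings in this version of the SDK:

    import semantic_kernel.connectors.ai.open_ai as sk_oai

    # Assumed class name: create typed request settings directly for the Azure OpenAI
    # chat service, which gives type checking on the individual fields.
    req_settings = sk_oai.AzureChatRequestSettings(
        max_tokens=2000,
        temperature=0.7,
        top_p=0.8,
    )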
[Another changed file; path not shown]
@@ -33,10 +33,8 @@
**azure_ai_search_settings
)
)
azure_chat_with_data_settings = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)
azure_chat_with_data_settings = sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)


@@ -50,23 +48,17 @@
)
kernel.add_chat_service("chat-gpt", chat_service)

prompt_config = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
data_source_settings=azure_chat_with_data_settings,
)
prompt_config = sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
data_source_settings=azure_chat_with_data_settings,
)

prompt_template = sk.ChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)

prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am an AI assistant here to answer your questions."
)
prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
[Another changed file; path not shown]
@@ -19,10 +19,8 @@
# Set index language
azure_aisearch_datasource.parameters.indexLanguage = "en"

azure_chat_with_data_settings = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)
azure_chat_with_data_settings = sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)

# For example, AI Search index may contain the following document:
@@ -44,20 +42,16 @@
chat_service,
)

prompt_config = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
inputLanguage="fr",
outputLanguage="de",
data_source_settings=azure_chat_with_data_settings,
)
prompt_config = sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
inputLanguage="fr",
outputLanguage="de",
data_source_settings=azure_chat_with_data_settings,
)

prompt_template = sk.ChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)

prompt_template.add_user_message("Bonjour!")
prompt_template.add_assistant_message(
[Another changed file; path not shown]
@@ -27,10 +27,8 @@
)
)

azure_chat_with_data_settings = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)
azure_chat_with_data_settings = sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)

# For example, AI Search index may contain the following document:
@@ -63,22 +61,16 @@
# if you only want to use a specific function, set the name of that function in this parameter,
# the format for that is 'SkillName-FunctionName', (i.e. 'math-Add').
# if the model or api version do not support this you will get an error.
prompt_config = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
function_call="auto",
data_source_settings=azure_chat_with_data_settings,
)
)
prompt_template = OpenAIChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
prompt_config = sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
function_call="auto",
data_source_settings=azure_chat_with_data_settings,
)
prompt_template = OpenAIChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)
prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am an AI assistant here to answer your questions."
)
prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
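The comments in the hunk above explain that function_call can also name a single function in the 'SkillName-FunctionName' format instead of "auto". A hedged variant of the same prompt_config call, reusing the names from the diff above and assuming a MathSkill imported under the skill name "math":

    # Identical to the call in the diff, except the model is pinned to one function;
    # "math-Add" assumes MathSkill was imported as "math" and exposes an "Add" function.
    prompt_config = sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
        max_tokens=2000,
        temperature=0.7,
        top_p=0.8,
        function_call="math-Add",
        data_source_settings=azure_chat_with_data_settings,
    )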
[Another changed file; path not shown]
@@ -35,10 +35,8 @@
**azure_ai_search_settings
)
)
azure_chat_with_data_settings = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)
azure_chat_with_data_settings = sk_oai.OpenAIChatPromptTemplateWithDataConfig.AzureChatWithDataSettings(
dataSources=[azure_aisearch_datasource]
)


@@ -52,23 +50,17 @@
)
kernel.add_chat_service("chat-gpt", chat_service)

prompt_config = (
sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
data_source_settings=azure_chat_with_data_settings,
)
prompt_config = sk_oai.OpenAIChatPromptTemplateWithDataConfig.from_completion_parameters(
max_tokens=2000,
temperature=0.7,
top_p=0.8,
data_source_settings=azure_chat_with_data_settings,
)

prompt_template = sk.ChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)

prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am an AI assistant here to answer your questions."
)
prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
[Another changed file; path not shown]
@@ -18,12 +18,8 @@

async def populate_memory(kernel: sk.Kernel) -> None:
# Add some documents to the ACS semantic memory
await kernel.memory.save_information_async(
COLLECTION_NAME, id="info1", text="My name is Andrea"
)
await kernel.memory.save_information_async(
COLLECTION_NAME, id="info2", text="I currently work as a tour guide"
)
await kernel.memory.save_information_async(COLLECTION_NAME, id="info1", text="My name is Andrea")
await kernel.memory.save_information_async(COLLECTION_NAME, id="info2", text="I currently work as a tour guide")
await kernel.memory.save_information_async(
COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005"
)
@@ -32,9 +28,7 @@ async def populate_memory(kernel: sk.Kernel) -> None:
id="info4",
text="I visited France and Italy five times since 2015",
)
await kernel.memory.save_information_async(
COLLECTION_NAME, id="info5", text="My family is from New York"
)
await kernel.memory.save_information_async(COLLECTION_NAME, id="info5", text="My family is from New York")


async def search_acs_memory_questions(kernel: sk.Kernel) -> None:
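populate_memory above saves five facts into the COLLECTION_NAME collection; the search_acs_memory_questions function that follows it is cut off by the diff. A rough sketch of what such a query loop could look like, assuming kernel.memory.search_async(collection, query, limit=...) exists with this shape in this SDK version:

    async def search_acs_memory_questions(kernel: sk.Kernel) -> None:
        questions = ["What is my name?", "Where have I lived?"]
        for question in questions:
            # search_async and the MemoryQueryResult.text attribute are assumed here.
            results = await kernel.memory.search_async(COLLECTION_NAME, question, limit=1)
            if results:
                print(f"{question} -> {results[0].text}")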
4 changes: 1 addition & 3 deletions python/samples/kernel-syntax-examples/bing_search_skill.py
@@ -12,9 +12,7 @@

async def main():
kernel = sk.Kernel()
deployment, key, endpoint, api_version = sk.azure_openai_settings_from_dot_env(
include_api_version=True
)
deployment, key, endpoint, api_version = sk.azure_openai_settings_from_dot_env(include_api_version=True)
kernel.add_chat_service(
"chat-gpt",
AzureChatCompletion(
12 changes: 3 additions & 9 deletions python/samples/kernel-syntax-examples/chat.py
@@ -18,17 +18,11 @@
kernel = sk.Kernel()

api_key, org_id = sk.openai_settings_from_dot_env()
kernel.add_chat_service(
"chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)
)
kernel.add_chat_service("chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id))

prompt_config = sk.PromptTemplateConfig.from_completion_parameters(
max_tokens=2000, temperature=0.7, top_p=0.4
)
prompt_config = sk.PromptTemplateConfig.from_completion_parameters(max_tokens=2000, temperature=0.7, top_p=0.4)

prompt_template = sk.PromptTemplate(
sk_prompt, kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.PromptTemplate(sk_prompt, kernel.prompt_template_engine, prompt_config)

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
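chat.py registers the ChatBot semantic function, but the chat loop itself falls outside the hunk. A minimal sketch of how the registered function is typically driven, assuming sk.ContextVariables and kernel.run_async(..., input_vars=...) behave as in the other samples of this release; the variable names are illustrative only:

    import semantic_kernel as sk

    async def chat(kernel: sk.Kernel, chat_function) -> None:
        context_vars = sk.ContextVariables()
        # "user_input" is assumed to match a variable referenced by sk_prompt.
        context_vars["user_input"] = "I want to find a good Italian restaurant"
        answer = await kernel.run_async(chat_function, input_vars=context_vars)
        print(f"ChatBot: {answer}")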
16 changes: 4 additions & 12 deletions python/samples/kernel-syntax-examples/chat_gpt_api.py
@@ -17,23 +17,15 @@
kernel = sk.Kernel()

api_key, org_id = sk.openai_settings_from_dot_env()
kernel.add_chat_service(
"chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)
)
kernel.add_chat_service("chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id))

prompt_config = sk.PromptTemplateConfig.from_completion_parameters(
max_tokens=2000, temperature=0.7, top_p=0.8
)
prompt_config = sk.PromptTemplateConfig.from_completion_parameters(max_tokens=2000, temperature=0.7, top_p=0.8)

prompt_template = sk.ChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)

prompt_template.add_system_message(system_message)
prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am Mosscap, a chat bot. I'm trying to figure out what people need."
)
prompt_template.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
[Another changed file; path not shown]
@@ -65,14 +65,10 @@
functions=get_function_calling_object(kernel, {"exclude_skill": ["ChatBot"]}),
)
)
prompt_template = OpenAIChatPromptTemplate(
"{{$user_input}}", kernel.prompt_template_engine, prompt_config
)
prompt_template = OpenAIChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config)
prompt_template.add_system_message(system_message)
prompt_template.add_user_message("Hi there, who are you?")
prompt_template.add_assistant_message(
"I am Mosscap, a chat bot. I'm trying to figure out what people need."
)
prompt_template.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.")

function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config)
4 changes: 1 addition & 3 deletions python/samples/kernel-syntax-examples/google_palm_chat.py
@@ -10,9 +10,7 @@


async def chat_request_example(api_key):
palm_chat_completion = sk_gp.GooglePalmChatCompletion(
"models/chat-bison-001", api_key
)
palm_chat_completion = sk_gp.GooglePalmChatCompletion("models/chat-bison-001", api_key)
settings = GooglePalmChatRequestSettings()
settings.temperature = 1
