Happ.AI

#SapnosezyadAI

Get Response

Note: Use the Python template below to append user and assistant messages to the conversation object.
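
In practice, that just means appending role-tagged message dictionaries to the conversation list built in the quickstart below. A minimal sketch, where the message values are placeholders for whatever your application already has:

conversation = [{"content": "You are a really helpful assistant", "role": "system"}]
assistant_reply = "Hey there, how can I assist you today?"   # placeholder assistant turn
next_user_message = "I'd like to book tickets."              # placeholder user turn
conversation.append({"content": assistant_reply, "role": "assistant"})
conversation.append({"content": next_user_message, "role": "user"})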

Python Quickstart
              

import requests
import json

# Define a sample business utility function: Book Tickets
def book_tickets():
    """
    Sample business utility function to demonstrate booking tickets.

    Replace this with your actual booking tickets logic.
    """
    print("Booking Tickets...")
    # Your booking tickets logic here
    print("Tickets Booked Successfully.")

# Define a sample business utility function: Pay Bills
def pay_bills():
    """
    Sample business utility function to demonstrate paying bills.

    Replace this with your actual bill payment logic.
    """
    print("Paying Bills...")
    # Your bill payment logic here
    print("Bills Paid Successfully.")

# Define a function to execute your business-utility functions
def execute_function(function_name):
    """
    Executes a user-defined business utility function by its name if it exists in the global scope.

    Args:
        function_name (str): The name of the business utility function to be executed.

    Returns:
        None
    """
    if function_name in globals() and callable(globals()[function_name]):
        globals()[function_name]()
    else:
        print(f"Business utility function '{function_name}' not found or not callable.")

# Define a function to extract adjusted similarity values and related content
def extract_adjusted_similarity_values(s):
    """
    Extracts adjusted similarity values and related business content from a string.

    Args:
        s (str): The input string containing adjusted similarity values and business content.

    Returns:
        list: A list of dictionaries containing adjusted similarity and business content.
    """
    results = []
    start = 0
    while True:
        start = s.find('"adjusted_similarity":', start)
        if start == -1:
            break
        start_value = start + len('"adjusted_similarity":')
        end_value = s.find(',', start_value)
        adjusted_similarity = float(s[start_value:end_value].strip())

        # Locate the business content in the "output" field and capture
        # everything up to the closing '"}' sequence.
        start_output = s.find('"output":"', end_value) + len('"output":"')
        end_output = s.find('"}', start_output)
        output = s[start_output:end_output].strip()

        results.append({"adjusted_similarity": adjusted_similarity, "business_content": output})
        start = end_value

    return results

# Specify the id of your trained transformer
transformer_id = "2"
# Action can either be PrefrontalCortex or Amygdala
action = "PrefrontalCortex"
user_message = input("Enter your message: ")
print("You entered:", user_message)

# Construct the conversation array
conversation = [
    {"content": "You are a really helpful assistant", "role": "system"},
    {"content": "Hey there, how can I assist you today?", "role": "assistant"},
    {"content": user_message, "role": "user"}
]

# Prepare the request body
request_body = {
    "conversation": conversation,
    "action": action,
    "transformer_id": transformer_id
}

# Endpoint URL
url = "https://ai.wiom.in/process_rgwai"

# Make the POST request
response = requests.post(url, json=request_body)

# Process and print the modified response
modified_response = response.text.replace('\\', '')
if modified_response.startswith('"') and modified_response.endswith('"'):
    modified_response = modified_response[1:-1]

# Extract Amygdala and Prefrontal Cortex results
start_amygdala = modified_response.find('"amygdala_result":"') + len('"amygdala_result":"')
end_amygdala = modified_response.find(']"', start_amygdala) + 1
amygdala_result_raw = modified_response[start_amygdala:end_amygdala]

start_prefrontal = modified_response.find('"prefrontal_cortex_result":"') + len('"prefrontal_cortex_result":"')
end_prefrontal = modified_response.find('}"', start_prefrontal) + 1
prefrontal_cortex_result_raw = modified_response[start_prefrontal:end_prefrontal]

# Process Amygdala results and execute relevant business actions
amygdala_result = json.loads(amygdala_result_raw)
for result in amygdala_result:
    if result["adjusted_similarity"] > 60.0:
        function_name = result["output"]
        print(f"Amygdala Adjusted Similarity: {result['adjusted_similarity']}", f"Business Action: {function_name}")
        print()
        execute_function(function_name)

# Process Prefrontal Cortex results
extracted_results = extract_adjusted_similarity_values(prefrontal_cortex_result_raw)

# Find the item with the highest adjusted similarity
highest_similarity_item = max(extracted_results, key=lambda x: x["adjusted_similarity"])
print()

# Print the adjusted similarity and business content with the highest relevance
print("Highest Adjusted Similarity:", highest_similarity_item["adjusted_similarity"])
print("Relevant Business Content:", highest_similarity_item["business_content"])

# What next? Depending on business requirements, you can use the Amygdala and Prefrontal Cortex results in isolation or cumulatively, and even make their usage contingent on a minimum similarity score, as sketched below.
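
# Illustrative sketch only: gate both result sets on a minimum adjusted
# similarity before acting on them cumulatively. MIN_SIMILARITY is a
# hypothetical business threshold, not a value prescribed by the API.
MIN_SIMILARITY = 75.0
confident_actions = [r["output"] for r in amygdala_result
                     if r["adjusted_similarity"] >= MIN_SIMILARITY]
if confident_actions and highest_similarity_item["adjusted_similarity"] >= MIN_SIMILARITY:
    print("Relevant content for the actions below:", highest_similarity_item["business_content"])
    for name in confident_actions:
        execute_function(name)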

            
          
Playground









Train

1. Create Transformers: To create a new fine-tuned transformer, format your training data using this template, then upload the CSV files below. You can create multiple transformers from the same set of CSV files by assigning each transformer's rows a unique letter in the id column. Then note down the designated id that is displayed after the files are submitted.
2. Update Transformers: To update existing transformers, format your training data using this template, then upload the CSV files below. You can update multiple transformers with the same set of CSV files by setting the id of each row to its corresponding transformer id.

Python Quickstart
              

import requests

# File paths
training_data_file_path = 'path/to/your/training_data.csv'
vpfc_globals_file_path = 'path/to/your/vpfc_globals.csv'
url = 'https://ai.wiom.in/transform_transformer'

# Open the files in binary mode
with open(training_data_file_path, 'rb') as training_data, \
     open(vpfc_globals_file_path, 'rb') as vpfc_globals:

    # Create a dictionary of files
    files = {
        'training_data': training_data,
        'vpfc_globals': vpfc_globals
    }

    # Send the POST request
    response = requests.post(url, files=files)

    # Print the response
    print("Status Code:", response.status_code)
    print("Response Body:", response.text)

            
          
Playground