OpenAI utils

embedding_content(prompt, model_name='text-embedding-3-small')

Generates an embedding for the given prompt using the specified OpenAI embedding model.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `prompt` | | The prompt to generate the embedding for. | required |
| `model_name` | | The model to use for generating the embedding. | `'text-embedding-3-small'` |
Source code in TimelineKGQA/openai_utils.py
def embedding_content(prompt, model_name="text-embedding-3-small"):
    """
    Generate an embedding for the given prompt using the specified OpenAI model.

    Args:
        prompt: The prompt to generate the embedding for
        model_name: The model to use for generating the embedding

    """
    response = client.embeddings.create(input=prompt, model=model_name)

    return response.data[0].embedding
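
A minimal usage sketch, assuming the package is importable as `TimelineKGQA` (as the source path suggests), that `openai_utils` creates a module-level OpenAI `client` (e.g. `client = OpenAI()`), and that `OPENAI_API_KEY` is set in the environment; the example text is made up:

```python
from TimelineKGQA.openai_utils import embedding_content

# Embed a short piece of text. With text-embedding-3-small the result is a
# 1536-dimensional list of floats.
vector = embedding_content("Who led Organization B between 2010 and 2015?")
print(len(vector), vector[:3])
```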

paraphrase_medium_question(question, model_name='gpt-4o')

Paraphrases the given question using the OpenAI model specified.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `question` | `str` | The question to paraphrase. | required |
| `model_name` | `str` | The model to use for paraphrasing. | `'gpt-4o'` |

Returns:

| Type | Description |
| --- | --- |
| `str` | The paraphrased question. |

Source code in TimelineKGQA/openai_utils.py
def paraphrase_medium_question(
    question: str,
    model_name: str = "gpt-4o",
) -> str:
    """
    Paraphrases the given question using the OpenAI model specified.

    Args:
        question (str): The question to paraphrase.

        model_name (str): The model to use for paraphrasing.

    Returns:
        str: The paraphrased question.
    """
    prompt_text = f"Paraphrase the following question: '{question}'"
    try:
        # Some examples include:
        # - Who is affiliated with the organization during a given time?
        # - Which organization is a specific person affiliated with?
        # - When/During what period is the start time ...
        # - Etc.
        # A statement that holds from the beginning of time to the end of time
        # is treated as always true for the whole timeline.
        response = client.chat.completions.create(
            model=model_name,
            messages=[
                {
                    "role": "system",
                    "content": """You are an expert on paraphrasing questions.
                                  ??? is the masked out answer
                                  Your job is to paraphrase this into a natural language question.
                                  The missing part can be someone, some organisation or some time.
                                  Representative question types include:
                                  - When/Before/After/During/(temporal conditions calculated based on Timeline Operation) event A happens, who is the leader of organization B?
                                  - Is event A before/after/during event B?
                                  Do not mention any specific time in the question; it is a type of implicit temporal question.
                                  """,
                },
                {
                    "role": "user",
                    "content": prompt_text,
                },
            ],
            max_tokens=100,
            temperature=0.8,
            stop=["\n"],
        )
        paraphrased_question = response.choices[0].message.content
        return paraphrased_question
    except Exception as e:
        print(f"An error occurred: {e}")
        return ""
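
A minimal usage sketch, under the same assumptions as above (module-level OpenAI `client`, importable `TimelineKGQA` package); the templated question, with `???` marking the masked-out answer as the system prompt describes, is made up for illustration:

```python
from TimelineKGQA.openai_utils import paraphrase_medium_question

# ??? marks the masked-out answer, as the system prompt describes.
question = "??? is the leader of Organization B during Event A."
print(paraphrase_medium_question(question, model_name="gpt-4o"))
```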

paraphrase_simple_question(question, answer=None, answer_type=None, model_name='gpt-4o')

Paraphrases the given question using the OpenAI model specified.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `question` | `str` | The question to paraphrase. | required |
| `answer` | `str` | The answer to the question, which can help in generating a context-aware paraphrase. | `None` |
| `answer_type` | `str` | The type of the answer, which can help in generating a context-aware paraphrase. | `None` |
| `model_name` | `str` | The model to use for paraphrasing. | `'gpt-4o'` |

Returns:

| Type | Description |
| --- | --- |
| `str` | The paraphrased question. |

Source code in TimelineKGQA/openai_utils.py
def paraphrase_simple_question(
    question: str,
    answer: str = None,
    answer_type: str = None,
    model_name: str = "gpt-4o",
) -> str:
    """
    Paraphrases the given question using the OpenAI model specified.

    Args:
        question (str): The question to paraphrase.
        answer (str, optional): The answer to the question, which can help in generating a context-aware paraphrase.
        answer_type (str, optional): The type of the answer, which can help in generating a context-aware paraphrase.
        model_name (str): The model to use for paraphrasing.

    Returns:
        str: The paraphrased question.
    """
    prompt_text = f"Paraphrase the following question: '{question}'"
    try:
        # Some examples include:
        # - Who is affiliated with the organization during a given time?
        # - Which organization is a specific person affiliated with?
        # - When/During what period is the start time ...
        # - Etc.
        # A statement that holds from the beginning of time to the end of time
        # is treated as always true for the whole timeline.
        response = client.chat.completions.create(
            model=model_name,
            messages=[
                {
                    "role": "system",
                    "content": """You are an expert on paraphrasing questions.
                                  Your job is to paraphrase this into a natural language question.
                                  The missing part can be someone, some organisation or some time.
                                  Use diverse ways to represent the temporal aspect of the question.
                                  Only return the paraphrased question, nothing else.
                                  """,
                },
                {
                    "role": "user",
                    "content": prompt_text,
                },
            ],
            max_tokens=100,
            temperature=0.8,
            stop=["\n"],
        )
        paraphrased_question = response.choices[0].message.content
        return paraphrased_question
    except Exception as e:
        print(f"An error occurred: {e}")
        return ""
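
A minimal usage sketch, under the same assumptions as above; the question, `answer`, and `answer_type` values are made up, and note that the current implementation builds the prompt from `question` alone:

```python
from TimelineKGQA.openai_utils import paraphrase_simple_question

# answer and answer_type are optional hints; the prompt sent to the model is
# built from the question alone, so they may also be omitted.
question = "??? is affiliated with Organization X from 2010 to 2015."
paraphrased = paraphrase_simple_question(
    question,
    answer="Alice",
    answer_type="person",
)
print(paraphrased)
```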