
src/codellama-main/example_instructions.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.

from typing import Optional

import fire

from llama import Llama


def main(
    ckpt_dir: str,
    tokenizer_path: str,
    temperature: float = 0.2,
    top_p: float = 0.95,
    max_seq_len: int = 512,
    max_batch_size: int = 8,
    max_gen_len: Optional[int] = None,
):
    # Load the model and tokenizer from the given checkpoint directory.
    generator = Llama.build(
        ckpt_dir=ckpt_dir,
        tokenizer_path=tokenizer_path,
        max_seq_len=max_seq_len,
        max_batch_size=max_batch_size,
    )

    # Each dialog is a list of messages; an optional "system" message can be
    # used to steer the style of the "user" request that follows it.
    instructions = [
        [
            {
                "role": "user",
                "content": "In Bash, how do I list all text files in the current directory (excluding subdirectories) that have been modified in the last month?",
            }
        ],
        [
            {
                "role": "user",
                "content": "What is the difference between inorder and preorder traversal? Give an example in Python.",
            }
        ],
        [
            {
                "role": "system",
                "content": "Provide answers in JavaScript",
            },
            {
                "role": "user",
                "content": "Write a function that computes the set of sums of all contiguous sublists of a given list.",
            }
        ],
    ]
    results = generator.chat_completion(
        instructions,  # type: ignore
        max_gen_len=max_gen_len,
        temperature=temperature,
        top_p=top_p,
    )

    # Print each dialog followed by the model's reply.
    for instruction, result in zip(instructions, results):
        for msg in instruction:
            print(f"{msg['role'].capitalize()}: {msg['content']}\n")
        print(
            f"> {result['generation']['role'].capitalize()}: {result['generation']['content']}"
        )
        print("\n==================================\n")


if __name__ == "__main__":
    fire.Fire(main)
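
Because the entry point is wrapped in fire.Fire(main), each parameter of main is exposed as a command-line flag. A minimal invocation sketch, assuming a Code Llama Instruct checkpoint has already been downloaded (the checkpoint and tokenizer paths below are illustrative) and that the script is launched with torchrun so torch.distributed is initialized as the Llama reference implementation expects:

    torchrun --nproc_per_node 1 example_instructions.py \
        --ckpt_dir CodeLlama-7b-Instruct/ \
        --tokenizer_path CodeLlama-7b-Instruct/tokenizer.model \
        --max_seq_len 512 --max_batch_size 4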