-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexample.py
More file actions
36 lines (28 loc) · 933 Bytes
/
example.py
File metadata and controls
36 lines (28 loc) · 933 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
from aimodel import MLModel, create_app
# Create your model with 2 functions: load_model and generate
class MyModel:
    """Minimal stand-in model exposing the two hooks the framework needs.

    ``load_model`` plays the role of a weight-loading step and ``generate``
    the role of inference; both are stubs here.
    """

    def load_model(self):
        """Pretend to load weights; return the ready-to-use model (itself)."""
        return self

    def generate(self):
        """Produce a fixed placeholder completion."""
        return "This is just a dummy answer."
# Create a function that will be used to answer the prompts
def answer(model: object, prompts: list[str], max_tokens: int) -> list[str]:
    """Return one generated reply per prompt.

    Args:
        model: A loaded model exposing a zero-argument ``generate()`` method.
        prompts: Prompts to answer; an empty list yields an empty list.
        max_tokens: Accepted for interface compatibility with the framework;
            unused because the dummy ``generate()`` takes no arguments.

    Returns:
        A list the same length as ``prompts``, in the same order.
    """
    # NOTE: the prompt text itself is also ignored — model.generate() is
    # called without arguments, matching MyModel's stub signature.
    return [model.generate() for _ in prompts]
# Instantiate the dummy model defined above.
my_model = MyModel()

# Start from an empty registry (skip the framework's default model),
# then register our model with its loader and answering callback.
models = MLModel(load_default=False)
models.add_model(model_name="My own model", load_model=my_model.load_model, answer=answer)

# Build the WSGI application around the registered models.
app = create_app(models=models)

if __name__ == '__main__':
    # Serve locally over HTTPS with an on-the-fly self-signed certificate.
    app.run(port=5000, ssl_context='adhoc')