import flama
import numpy as np  # was missing: `np` is referenced below but never imported
from sklearn.neural_network import MLPClassifier

# Train a small MLP on the XOR truth table as a minimal example model.
model = MLPClassifier(activation="tanh", max_iter=2000, hidden_layer_sizes=(10,))
model.fit(
    np.array([[0, 0], [0, 1], [1, 0], [1, 1]]),
    np.array([0, 1, 1, 0]),
)

# Serialise the fitted model into Flama's binary format so it can be served.
flama.dump(model, "sklearn_model.flm")
> flama serve path/to/model.flm
INFO: Started server process [78260]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
Effortless Development
There is a wide spectrum of data validation libraries for Python to combine data types into structures, validate them, and provide tools for serialisation of app-level objects to primitive Python types.
Flama natively supports both Typesystem and Marshmallow, data type validation libraries which make possible the standardisation of the API via generation of OpenAPI schemas, and allow the user to define API schemas effortlessly.
Flama Schema generator gathers all the API information needed directly from your code and infers the schema that represents your API based on the OpenAPI standard. The schema will also be served at the route /schema/ by default.
Models Lifecycle
from flama import Flama, ModelComponentBuilder

# Read the serialised model from disk and build an injectable component.
with open("/path/to/model.flm", "rb") as f:
    component = ModelComponentBuilder.loads(f.read())
    ModelType = component.get_model_type  # Get the type to allow inject dependency

app = Flama(components=[component])


@app.get("/")
def model_view(model: ModelType, model_input: str):
    """
    tags:
        - model
    summary:
        Model prediction.
    description:
        Interact with the model to generate a prediction based on given input
    responses:
        200:
            description: Model prediction.
    """
    model_output = model.predict(model_input)
    return {"model_output": model_output}
Extensibility
import typing
import mlflowfrom flama import Module, Flama
class MLFlowModule(Module):
    """Flama module that connects the application to an MLflow tracking server."""

    name = "mlflow"  # the module is exposed on the app under this attribute

    def __init__(self, app: Flama, url: typing.Optional[str] = None, *args, **kwargs):
        # Fixed annotation: default is None, so the type must be Optional[str]
        # (was `url: str = None`). Interface is unchanged for callers.
        super().__init__(app, *args, **kwargs)
        self.url = url

    async def on_startup(self):
        # Point the MLflow client at the configured tracking server when the app starts.
        mlflow.set_tracking_uri(self.url)

    async def on_shutdown(self):
        # No teardown needed for the MLflow client.
        ...

    def search_runs(self, experiment_ids: typing.List[str], filter_string: str):
        """Search MLflow runs for the given experiments matching the filter string."""
        return mlflow.search_runs(experiment_ids, filter_string)
# Register the custom module on the application.
app = Flama(modules=[MLFlowModule])

# Module usage example
model = app.mlflow.search_runs(["foo"], "tags.name = 'bar'")
Development Tools
