Get up and running with the LEAP SDK in minutes. Install the SDK, load models, and start generating content on Android, iOS, macOS, and more.
Latest version: v0.10.0 (preview)
The LEAP SDK is a Kotlin Multiplatform library. It supports Android, iOS, macOS, JVM, and more from a single codebase. Choose your platform below to get started.
The LeapModelDownloader runs as a foreground service and displays notifications during downloads. Add the following permissions to your AndroidManifest.xml:
The POST_NOTIFICATIONS permission requires a runtime permission request on Android 13 (API 33) and above. See the code example in step 4 for how to request this permission.
No special permissions are required on iOS or macOS. The SDK handles network access and file storage automatically.
The SDK uses GGUF manifests for loading models. Given a model name and quantization (from the LEAP Model Library), the SDK automatically downloads the necessary files and loads the model with optimal parameters.
Kotlin
Swift
Using LeapModelDownloader (recommended for Android). LeapModelDownloader provides background downloads with WorkManager integration and notification support. Example ViewModel:
import android.app.Application
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import ai.liquid.leap.Conversation
import ai.liquid.leap.ModelRunner
import ai.liquid.leap.model_downloader.LeapModelDownloader
import ai.liquid.leap.model_downloader.LeapModelDownloaderNotificationConfig
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking

/**
 * Downloads and loads a LEAP model via [LeapModelDownloader] and exposes
 * loading / progress / error state to the UI as read-only [StateFlow]s.
 */
class ChatViewModel(application: Application) : AndroidViewModel(application) {

    // The downloader runs as a foreground service; these strings are shown in
    // its download notification (requires the manifest permissions above).
    private val modelDownloader = LeapModelDownloader(
        application,
        notificationConfig = LeapModelDownloaderNotificationConfig.build {
            notificationTitleDownloading = "Downloading AI model..."
            notificationTitleDownloaded = "Model ready!"
            notificationContentDownloading = "Please wait while the model downloads"
        }
    )

    private var modelRunner: ModelRunner? = null
    private var conversation: Conversation? = null

    // Backing properties keep the mutable flows private; the UI observes the
    // read-only views below.
    private val _isLoading = MutableStateFlow(false)
    val isLoading: StateFlow<Boolean> = _isLoading.asStateFlow()

    private val _downloadProgress = MutableStateFlow(0f)
    val downloadProgress: StateFlow<Float> = _downloadProgress.asStateFlow()

    private val _errorMessage = MutableStateFlow<String?>(null)
    val errorMessage: StateFlow<String?> = _errorMessage.asStateFlow()

    /**
     * Downloads the model if needed, loads it, and creates a [Conversation].
     * Progress callbacks update [downloadProgress]; failures surface via
     * [errorMessage] rather than throwing.
     */
    fun loadModel() {
        viewModelScope.launch {
            _isLoading.value = true
            _errorMessage.value = null
            try {
                modelRunner = modelDownloader.loadModel(
                    modelSlug = "LFM2-1.2B",
                    quantizationSlug = "Q5_K_M",
                    progress = { progressData ->
                        _downloadProgress.value = progressData.progress
                    }
                )
                conversation = modelRunner?.createConversation()
                _isLoading.value = false
            } catch (e: CancellationException) {
                // Never swallow cancellation — rethrow so structured
                // concurrency (viewModelScope teardown) keeps working.
                throw e
            } catch (e: Exception) {
                _errorMessage.value = "Failed to load model: ${e.message}"
                _isLoading.value = false
            }
        }
    }

    override fun onCleared() {
        super.onCleared()
        // Release native model resources. runBlocking is acceptable here:
        // onCleared is the terminal lifecycle callback and unload must finish
        // before the process drops the last reference.
        runBlocking(Dispatchers.IO) {
            modelRunner?.unload()
        }
    }
}
Alternative: Using LeapDownloader (Cross-Platform)
For cross-platform projects or if you don’t need Android-specific features, use LeapDownloader from the core leap-sdk module:
import ai.liquid.leap.LeapDownloader
import ai.liquid.leap.LeapDownloaderConfig

lifecycleScope.launch {
    try {
        // Keep downloaded model files under the app-private files directory.
        val saveDirPath = File(context.filesDir, "model_files").absolutePath
        val downloader = LeapDownloader(config = LeapDownloaderConfig(saveDir = saveDirPath))

        // Resolves the GGUF manifest, downloads any missing files, and loads
        // the model.
        val modelRunner = downloader.loadModel(
            modelSlug = "LFM2-1.2B",
            quantizationSlug = "Q5_K_M"
        )
    } catch (e: LeapModelLoadingException) {
        Log.e(TAG, "Failed to load the model. Error message: ${e.message}")
    }
}
This approach works on all platforms (Android, iOS, macOS, JVM) but doesn’t provide Android-specific features like background downloads or notifications.
Legacy: Executorch Bundles
Browse the Leap Model Library to download a model bundle. Push the bundle to the device:
import LeapSDK

/// Loads a LEAP model and publishes loading state plus the active
/// conversation for SwiftUI views to observe.
@MainActor
final class ChatViewModel: ObservableObject {
    @Published var isLoading = false
    @Published var conversation: Conversation?

    private var modelRunner: ModelRunner?
    private var generationTask: Task<Void, Never>?

    /// Downloads the model if necessary, loads it, and starts a conversation.
    func loadModel() async {
        isLoading = true
        // `defer` guarantees the spinner stops on both success and failure.
        defer { isLoading = false }

        do {
            let modelRunner = try await Leap.load(
                model: "LFM2-1.2B",
                quantization: "Q5_K_M",
                downloadProgressHandler: { progress, speed in
                    // progress: Double (0...1), speed: bytes per second
                }
            )
            conversation = modelRunner.createConversation(
                systemPrompt: "You are a helpful assistant."
            )
            self.modelRunner = modelRunner
        } catch {
            print("Failed to load model: \(error)")
        }
    }
}
Legacy: Executorch Bundles
Browse the Leap Model Library and download a .bundle file. Ship it with your app by dragging the bundle into your Xcode project, or download it at runtime using LeapModelDownloader.
// Locate the model bundle that was shipped inside the app.
guard let bundleURL = Bundle.main.url(
    forResource: "LFM2-350-ENJP-MT",
    withExtension: "bundle"
) else {
    assertionFailure("Model bundle missing")
    return
}

// Load the model from the local bundle and open a conversation.
let modelRunner = try await Leap.load(url: bundleURL)
let conversation = modelRunner.createConversation(
    systemPrompt: "You are a helpful assistant."
)
Override runtime settings with LiquidInferenceEngineOptions: