I’ve recently started running a local AI stack, and I was wondering whether I can integrate it with Volumio so we can get PlexAmp-like AI functionality. For fun, I asked ChatGPT whether it knows about PlexAmp’s AI integration capabilities and whether it could help me code a plugin for Volumio 4 by reviewing the two developers’ reference links. I am not an AI expert, nor have I created a plugin before, so this is more an exercise for fun and learning. Are there any experts in Volumio and AI integration who can chime in on whether this is viable? Maybe we can collaborate and make something the community will find useful.
Plugin file tree
local-ai-playlists/
├─ package.json
├─ index.js
├─ UIConfig.json
├─ config.json
├─ install.sh
├─ uninstall.sh
├─ README.md
└─ lib/
├─ llmClient.js
├─ selector.js
├─ queue.js
└─ artClient.js
README.md
# Local AI Playlists (Volumio 4)
Generate playlists from a natural-language prompt using your **local LLM** (Ollama or OpenAI-compatible server).
Optionally generate artwork via Automatic1111. Target: **Volumio 4 (Bookworm, Node 20+)**.
## Features
- Prompt → LLM plan (genres, seeds) → local library search → queue → play
- Works with **local library** (MPD)
- Optional playlist artwork via **Automatic1111** (txt2img)
- Adds a Browse source ("AI Playlist") for convenience
## Requirements
- Volumio 4 (Bookworm)
- Local LLM:
- **Ollama**: `http://<host>:11434`
- **OpenAI-compatible** (LM Studio, vLLM, OpenWebUI shim…): `/v1/chat/completions`
- Optional: Automatic1111 at `http://<host>:7860`
## Install
1. Copy this folder to your machine and zip it:
```bash
cd local-ai-playlists
zip -r ../local-ai-playlists.zip .
package.json
{
"name": "local-ai-playlists",
"version": "0.2.0",
"description": "Generate playlists from a natural-language prompt using a local LLM, with optional local artwork.",
"main": "index.js",
"author": "You",
"license": "MIT",
"engines": {
"node": ">=20",
"volumio": ">=4"
},
"volumio_info": {
"prettyName": "Local AI Playlists",
"icon": "fa-magic",
"plugin_type": "music_service",
"architectures": ["armhf", "arm64", "amd64"],
"os": ["bookworm"]
},
"dependencies": {
"axios": "^1.7.4",
"mpd": "^2.2.0",
"v-conf": "^1.4.0",
"kew": "^0.7.0"
}
}
index.js
'use strict';
const libQ = require('kew');
const VConf = require('v-conf');
const llm = require('./lib/llmClient');
const selector = require('./lib/selector');
const queue = require('./lib/queue');
const art = require('./lib/artClient');
/**
 * Plugin controller for Local AI Playlists.
 * Volumio's plugin manager instantiates this with a context object that
 * carries the core command router, logger, and config manager.
 * @param {object} context - Plugin context supplied by Volumio.
 */
function ControllerLocalAI(context) {
  const self = this;
  self.context = context;
  self.commandRouter = context.coreCommand;
  self.logger = context.logger;
  self.configManager = context.configManager;
  // Per-plugin persisted configuration (loaded in onVolumioStart).
  self.config = new VConf();
}
module.exports = ControllerLocalAI;
// Tell Volumio which configuration files this plugin owns.
ControllerLocalAI.prototype.getConfigurationFiles = function () {
  const files = ['config.json'];
  return files;
};
/**
 * Lifecycle hook: load the persisted plugin configuration at Volumio boot.
 * @returns {Promise} kew promise resolved once config.json is loaded.
 */
ControllerLocalAI.prototype.onVolumioStart = function () {
  const defer = libQ.defer();
  try {
    const file = this.commandRouter.pluginManager.getConfigurationFile(this.context, 'config.json');
    this.config.loadFile(file);
    defer.resolve();
  } catch (err) {
    this.logger.error('[LocalAI] onVolumioStart config load failed: ' + err);
    defer.reject(err);
  }
  return defer.promise;
};
/**
 * Lifecycle hook: register the "AI Playlist" entry in Volumio's browse
 * sources when the plugin is enabled.
 * @returns {Promise} kew promise.
 */
ControllerLocalAI.prototype.onStart = function () {
  const defer = libQ.defer();
  const browseSource = {
    name: 'AI Playlist',
    uri: 'ai_playlist',
    plugin_type: 'music_service',
    plugin_name: 'local-ai-playlists'
  };
  try {
    this.commandRouter.volumioAddToBrowseSources(browseSource);
    this.logger.info('[LocalAI] Started');
    defer.resolve();
  } catch (err) {
    this.logger.error('[LocalAI] onStart error: ' + err);
    defer.reject(err);
  }
  return defer.promise;
};
/**
 * Lifecycle hook: remove the browse source when the plugin is disabled.
 * @returns {Promise} kew promise.
 */
ControllerLocalAI.prototype.onStop = function () {
  const defer = libQ.defer();
  try {
    // Volumio's API removes a browse source by its display name.
    this.commandRouter.volumioRemoveToBrowseSources('AI Playlist');
    this.logger.info('[LocalAI] Stopped');
    defer.resolve();
  } catch (err) {
    this.logger.error('[LocalAI] onStop error: ' + err);
    defer.reject(err);
  }
  return defer.promise;
};
/**
 * Build the plugin settings page: merge i18n strings into UIConfig.json and
 * inject the currently stored configuration values.
 * @returns {Promise} kew promise resolving to the populated UIConfig object.
 */
ControllerLocalAI.prototype.getUIConfig = function () {
  const defer = libQ.defer();
  const lang = this.commandRouter.sharedVars.get('language_code') || 'en';
  // Locate a form field by id within a section's content array.
  const field = (uiconf, section, id) => uiconf.sections[section].content.find(x => x.id === id);
  this.commandRouter.i18nJson(
    __dirname + '/i18n/strings_' + lang + '.json',
    __dirname + '/i18n/strings_en.json',
    __dirname + '/UIConfig.json'
  ).then(uiconf => {
    // Section 0: LLM settings
    const mode = this.config.get('llm.mode', 'ollama');
    field(uiconf, 0, 'llm_mode').value.value = mode;
    field(uiconf, 0, 'llm_mode').value.label = mode === 'openai' ? 'OpenAI-compatible' : 'Ollama';
    field(uiconf, 0, 'llm_base').value = this.config.get('llm.baseUrl', 'http://127.0.0.1:11434');
    field(uiconf, 0, 'llm_model').value = this.config.get('llm.model', 'llama3:8b-instruct');
    field(uiconf, 0, 'llm_timeout').value = String(this.config.get('llm.timeoutMs', 30000));
    field(uiconf, 0, 'llm_api_key').value = this.config.get('llm.apiKey', '');
    // Section 1: Artwork settings
    field(uiconf, 1, 'art_enabled').value = this.config.get('art.enabled', false);
    field(uiconf, 1, 'art_base').value = this.config.get('art.baseUrl', 'http://127.0.0.1:7860');
    field(uiconf, 1, 'art_size').value = this.config.get('art.size', '1024x1024');
    // Section 2: prompt form starts blank each time.
    field(uiconf, 2, 'user_prompt').value = '';
    field(uiconf, 2, 'max_tracks').value = '60';
    defer.resolve(uiconf);
  }).fail(err => {
    this.logger.error('[LocalAI] getUIConfig error: ' + err);
    defer.reject(err);
  });
  return defer.promise;
};
// Handle settings save (sections 0 & 1: LLM + Artwork)
/**
 * Persist LLM and artwork settings posted from the settings page.
 * Fix: a non-numeric timeout previously stored NaN (parseInt of garbage),
 * which would later be passed to axios as a timeout; it now falls back to
 * the 30000 ms default.
 * @param {object} data - Form values keyed by field id.
 * @returns {Promise} kew promise.
 */
ControllerLocalAI.prototype.saveSettings = function (data) {
  const defer = libQ.defer();
  try {
    // LLM
    this.config.set('llm.mode', data.llm_mode && data.llm_mode.value ? data.llm_mode.value : 'ollama');
    this.config.set('llm.baseUrl', data.llm_base || 'http://127.0.0.1:11434');
    this.config.set('llm.model', data.llm_model || 'llama3:8b-instruct');
    // Guard against NaN / non-positive values from free-text input.
    const timeoutMs = parseInt(data.llm_timeout || '30000', 10);
    this.config.set('llm.timeoutMs', Number.isFinite(timeoutMs) && timeoutMs > 0 ? timeoutMs : 30000);
    this.config.set('llm.apiKey', data.llm_api_key || '');
    // Artwork
    this.config.set('art.enabled', !!data.art_enabled);
    this.config.set('art.baseUrl', data.art_base || 'http://127.0.0.1:7860');
    this.config.set('art.size', data.art_size || '1024x1024');
    this.commandRouter.pushToastMessage('success', 'Local AI', 'Settings saved');
    defer.resolve();
  } catch (e) {
    this.logger.error('[LocalAI] saveSettings error: ' + e);
    this.commandRouter.pushToastMessage('error', 'Local AI', 'Failed to save settings');
    defer.reject(e);
  }
  return defer.promise;
};
// Generate playlist (section 2 button)
/**
 * Turn the user's natural-language prompt into a playing queue.
 * Pipeline: LLM plan -> local library search -> replace queue & play ->
 * optional artwork generation.
 *
 * Fixes over the original:
 * - `llm.planFromPrompt` is an async function and returns a NATIVE Promise,
 *   which has no kew `.fail()` method — the original chain threw
 *   "fail is not a function" and left errors unhandled. The chain now ends
 *   in `.catch()`.
 * - A non-numeric "Max tracks" value produced NaN (Math.max(1, NaN) is NaN),
 *   silently emptying the selector's slice; it now falls back to 60.
 *
 * @param {object} data - { user_prompt, max_tracks } from the UI section.
 * @returns {Promise} kew promise; always resolves so the UI modal closes.
 */
ControllerLocalAI.prototype.generatePlaylist = function (data) {
  const defer = libQ.defer();
  const prompt = (data && data.user_prompt) ? String(data.user_prompt).trim() : '';
  const parsedMax = parseInt((data && data.max_tracks) ? data.max_tracks : '60', 10);
  const maxTracks = Number.isFinite(parsedMax) ? Math.max(1, parsedMax) : 60;
  if (!prompt) {
    this.commandRouter.pushToastMessage('error', 'Local AI', 'Please enter a prompt');
    return libQ.resolve();
  }
  // Snapshot settings once so the async pipeline sees consistent values.
  const cfg = {
    llm: {
      mode: this.config.get('llm.mode', 'ollama'),
      baseUrl: this.config.get('llm.baseUrl', 'http://127.0.0.1:11434'),
      model: this.config.get('llm.model', 'llama3:8b-instruct'),
      timeoutMs: this.config.get('llm.timeoutMs', 30000),
      apiKey: this.config.get('llm.apiKey', '')
    },
    art: {
      enabled: this.config.get('art.enabled', false),
      baseUrl: this.config.get('art.baseUrl', 'http://127.0.0.1:7860'),
      size: this.config.get('art.size', '1024x1024')
    }
  };
  this.logger.info(`[LocalAI] Generating playlist for prompt: "${prompt}"`);
  llm.planFromPrompt(cfg.llm, prompt, maxTracks, this.logger)
    .then(plan => selector.findTracks(plan, maxTracks, this.logger))
    .then(trackUris => {
      if (!trackUris.length) throw new Error('No matching tracks found in your library');
      // Local files are played through the 'mpd' service.
      const items = trackUris.map(u => ({ service: 'mpd', uri: u }));
      const playlistName = `AI • ${new Date().toISOString().slice(0,16).replace('T',' ')}`;
      return queue.replaceQueueAndPlay(this.commandRouter, items)
        .then(() => ({ playlistName, count: items.length }));
    })
    .then(async ({ playlistName, count }) => {
      // Optional artwork generation (saved to /data); failures are non-fatal.
      if (cfg.art.enabled) {
        try {
          const imgPath = await art.generate(cfg.art, prompt, this.logger);
          this.logger.info('[LocalAI] Artwork saved: ' + imgPath);
        } catch (e) {
          this.logger.warn('[LocalAI] Artwork generation failed: ' + e);
        }
      }
      this.commandRouter.pushToastMessage('success', 'Local AI', `Created & playing: ${count} tracks`);
      defer.resolve();
    })
    .catch(err => {
      this.logger.error('[LocalAI] generatePlaylist error: ' + err);
      this.commandRouter.pushToastMessage('error', 'Local AI', (err && err.message) ? err.message : 'Failed to generate playlist');
      defer.resolve(); // resolve (not reject) so the settings modal closes gracefully
    });
  return defer.promise;
};
// Optional: simple Browse source
/**
 * Browse handler for the "AI Playlist" source. It shows one informational
 * entry pointing the user at the plugin settings page.
 * @param {string} curUri - The URI being browsed.
 * @returns {Promise} kew promise with a navigation object, or a rejection
 *   for unknown URIs.
 */
ControllerLocalAI.prototype.handleBrowseUri = function (curUri) {
  if (curUri !== 'ai_playlist') {
    return libQ.reject(new Error('Unknown URI: ' + curUri));
  }
  const infoItem = {
    service: 'local-ai-playlists',
    type: 'item-no-menu',
    title: 'Open plugin settings to generate a playlist',
    icon: 'fa-external-link',
    uri: 'ai_playlist/open'
  };
  return libQ.resolve({
    navigation: {
      prev: { uri: 'music-library' },
      lists: [{
        title: 'AI Playlist Generator',
        icon: 'fa-magic',
        availableListViews: ['list'],
        items: [infoItem]
      }]
    }
  });
};
UIConfig.json
{
"page": {
"label": "Local AI Playlists",
"icon": "fa-magic"
},
"sections": [
{
"id": "ai",
"element": "section",
"label": "Local LLM",
"content": [
{ "id": "llm_mode", "type": "dropdown", "label": "API Mode", "options": [
{ "value": "ollama", "label": "Ollama" },
{ "value": "openai", "label": "OpenAI-compatible" }
], "value": { "value": "ollama", "label": "Ollama" } },
{ "id": "llm_base", "type": "text", "label": "Base URL", "value": "http://127.0.0.1:11434" },
{ "id": "llm_model", "type": "text", "label": "Model", "value": "llama3:8b-instruct" },
{ "id": "llm_timeout", "type": "text", "label": "Timeout (ms)", "value": "30000" },
{ "id": "llm_api_key", "type": "password", "label": "API Key (if OpenAI-compatible)", "value": "" }
],
"saveButton": {
"label": "Save LLM Settings",
"data": ["llm_mode", "llm_base", "llm_model", "llm_timeout", "llm_api_key"]
},
"onSave": { "type": "controller", "endpoint": "music_service/local-ai-playlists", "method": "saveSettings" }
},
{
"id": "art",
"element": "section",
"label": "Artwork (optional)",
"content": [
{ "id": "art_enabled", "type": "switch", "label": "Generate artwork", "value": false },
{ "id": "art_base", "type": "text", "label": "Image API URL", "value": "http://127.0.0.1:7860" },
{ "id": "art_size", "type": "text", "label": "Size (WxH)", "value": "1024x1024" }
],
"saveButton": {
"label": "Save Artwork Settings",
"data": ["art_enabled", "art_base", "art_size"]
},
"onSave": { "type": "controller", "endpoint": "music_service/local-ai-playlists", "method": "saveSettings" }
},
{
"id": "prompt",
"element": "section",
"label": "Create Playlist",
"content": [
{ "id": "user_prompt", "type": "textarea", "label": "Describe your mix", "value": "" },
{ "id": "max_tracks", "type": "text", "label": "Max tracks", "value": "60" },
{ "id": "go", "type": "button", "label": "Generate" }
],
"onSave": { "type": "controller", "endpoint": "music_service/local-ai-playlists", "method": "generatePlaylist" }
}
]
}
config.json (defaults)
{
"llm": {
"mode": "ollama",
"baseUrl": "http://127.0.0.1:11434",
"model": "llama3:8b-instruct",
"timeoutMs": 30000,
"apiKey": ""
},
"art": {
"enabled": false,
"baseUrl": "http://127.0.0.1:7860",
"size": "1024x1024"
}
}
install.sh
#!/bin/bash
# Install hook run by Volumio's plugin manager after unpacking the plugin.
set -e

# Resolve the directory this script lives in (the plugin root).
SELF_DIR="$(cd "$(dirname "$0")" && pwd)"

echo "Installing dependencies for local-ai-playlists..."
cd "$SELF_DIR"
# Prefer the modern npm flag; fall back to the legacy one on older npm.
NODE_ENV=production npm install --omit=dev || npm install --production
echo "Done."
exit 0
uninstall.sh
#!/bin/bash
# Uninstall hook: this plugin keeps no state outside its own folder, so there
# is nothing to remove beyond what the plugin manager deletes itself.
set -e
echo "Uninstalling local-ai-playlists (no persistent state to clean)..."
exit 0
lib/llmClient.js
'use strict';
const axios = require('axios');
// System prompt sent to the LLM. It pins the exact JSON schema the selector
// expects (filters + seed_tracks + max_tracks) and forbids prose/markdown so
// the response can be JSON.parse'd directly.
const SYSTEM_PROMPT =
'You are a playlist planning assistant. Return ONLY compact JSON matching this schema:\n' +
'{ "filters": { "genres":[string], "moods":[string], "decades":[string], "min_bpm": number|null, "max_bpm": number|null }, ' +
'"seed_tracks":[{"artist":string,"title":string|null}], "max_tracks": number }\n' +
'No commentary, no markdown, no code fences.';
/**
 * Parse a string as JSON, returning null instead of throwing on bad input.
 * @param {string} s - Candidate JSON text.
 * @returns {*} The parsed value, or null when parsing fails.
 */
function tryJson(s) {
  let parsed = null;
  try {
    parsed = JSON.parse(s);
  } catch (err) {
    // Malformed JSON is an expected case for LLM output; signal with null.
    parsed = null;
  }
  return parsed;
}
/**
 * Ask the configured local LLM to turn a user prompt into a playlist plan.
 * Supports two backends: Ollama's /api/generate and any OpenAI-compatible
 * /v1/chat/completions endpoint.
 *
 * Robustness fixes over the original:
 * - Ollama requests now pass format:'json' so the model is constrained to
 *   emit valid JSON (supported by the Ollama generate API).
 * - Responses wrapped in markdown code fences (a common LLM habit even when
 *   instructed otherwise) are unwrapped, and as a last resort the first
 *   {...} object embedded in the text is extracted before giving up.
 *
 * @param {object} llmCfg - { mode, baseUrl, model, timeoutMs, apiKey }.
 * @param {string} userPrompt - Natural-language playlist description.
 * @param {number} maxTracks - Requested playlist length (passed to the model).
 * @param {object} logger - Volumio logger.
 * @returns {Promise<object>} The parsed plan object.
 * @throws {Error} When the model response cannot be parsed as JSON.
 */
async function planFromPrompt(llmCfg, userPrompt, maxTracks, logger) {
  const prompt = `User request: ${userPrompt}\nReturn JSON with max_tracks=${maxTracks}. Use filters and seed_tracks wisely.`;
  let text = '';
  if (llmCfg.mode === 'ollama') {
    const url = llmCfg.baseUrl.replace(/\/$/, '') + '/api/generate';
    const res = await axios.post(url, {
      model: llmCfg.model,
      prompt: `${SYSTEM_PROMPT}\n\n${prompt}`,
      format: 'json', // constrain Ollama output to valid JSON
      stream: false
    }, { timeout: llmCfg.timeoutMs });
    text = (res && res.data && res.data.response) ? String(res.data.response).trim() : '';
  } else {
    // OpenAI-compatible /v1/chat/completions
    const url = llmCfg.baseUrl.replace(/\/$/, '') + '/v1/chat/completions';
    const headers = llmCfg.apiKey ? { Authorization: 'Bearer ' + llmCfg.apiKey } : {};
    const res = await axios.post(url, {
      model: llmCfg.model,
      messages: [
        { role: 'system', content: SYSTEM_PROMPT },
        { role: 'user', content: prompt }
      ],
      temperature: 0.2
    }, { timeout: llmCfg.timeoutMs, headers });
    text = res?.data?.choices?.[0]?.message?.content?.trim() || '';
  }
  // Strip markdown code fences if present, e.g. ```json ... ```
  const unfenced = text.replace(/^```[a-z]*\s*/i, '').replace(/\s*```$/, '').trim();
  let json = tryJson(unfenced);
  if (!json) {
    // Last resort: pull the first {...} span out of surrounding chatter.
    const match = unfenced.match(/\{[\s\S]*\}/);
    json = match ? tryJson(match[0]) : null;
  }
  if (!json) throw new Error('LLM did not return valid JSON');
  logger.info(`[LocalAI] LLM plan received (${llmCfg.mode})`);
  return json;
}
module.exports = { planFromPrompt };
lib/selector.js
'use strict';
const mpd = require('mpd');
const cmd = mpd.cmd;
/**
* Minimal plan→tracks resolver using MPD search.
* Extensible: add BPM/mood/decade filtering if your tags carry them.
*/
/**
 * Open a client connection to the local MPD daemon.
 * @returns {Promise<object>} Resolves with a ready mpd client; rejects on
 *   connection error.
 */
function connect() {
  return new Promise((resolve, reject) => {
    const client = mpd.connect({ host: '127.0.0.1', port: 6600 });
    client.on('error', reject);
    client.on('ready', () => { resolve(client); });
  });
}
/**
 * Parse MPD's "key: value" response text and collect the file paths.
 * A "file:" key starts a new record; other keys attach to the current record
 * (only the file path is ultimately returned).
 * @param {string} str - Raw MPD response text.
 * @returns {string[]} File paths in the order they appeared.
 */
function parseKeyVals(str) {
  const files = [];
  let current = null;
  const flush = () => {
    if (current && current.file) files.push(current.file);
  };
  for (const line of (str || '').split('\n')) {
    if (!line.trim()) continue;
    const sep = line.indexOf(': ');
    if (sep < 0) continue; // not a key/value line
    const key = line.slice(0, sep);
    const val = line.slice(sep + 2);
    if (key === 'file') {
      flush();
      current = { file: val };
    } else if (current) {
      current[key] = val;
    }
  }
  flush();
  return files;
}
/**
 * Run an MPD `search <tag> <value>` and return the matching file paths.
 * Fix: the connection is now closed in a finally block, so a failing
 * sendCommand no longer leaks the MPD socket.
 * @param {string} tag - MPD tag name (e.g. 'genre', 'artist', 'title').
 * @param {string} value - Search value.
 * @returns {Promise<string[]>} Matching library file paths.
 */
async function mpdSearch(tag, value) {
  const c = await connect();
  try {
    const msg = await new Promise((resolve, reject) =>
      c.sendCommand(cmd('search', [tag, value]), (e, m) => e ? reject(e) : resolve(m))
    );
    return parseKeyVals(msg);
  } finally {
    c.close();
  }
}
/**
 * Resolve an LLM plan into concrete library track URIs via MPD search.
 * Genre filters and seed artists/titles are searched; moods/decades/BPM are
 * left as a future tag-based refinement.
 * @param {object} plan - Parsed LLM plan ({ filters, seed_tracks, ... }).
 * @param {number} limit - Max tracks to return (falsy -> 60).
 * @param {object} logger - Volumio logger.
 * @returns {Promise<string[]>} 'music-library/...' URIs for the mpd service.
 */
async function findTracks(plan, limit, logger) {
  const chosen = new Set();
  // Search one tag/value pair and merge the hits (Set dedupes).
  const collect = async (tag, value) => {
    const hits = await mpdSearch(tag, value);
    for (const uri of hits) chosen.add(uri);
  };
  // Genre filters
  for (const genre of plan.filters?.genres ?? []) {
    await collect('genre', genre);
  }
  // Seed artists / titles
  for (const seed of plan.seed_tracks ?? []) {
    if (seed.artist) await collect('artist', seed.artist);
    if (seed.title) await collect('title', seed.title);
  }
  // Moods / decades / BPM — optional: implement tag-based refinement here.
  const out = Array.from(chosen).slice(0, limit || 60);
  logger.info(`[LocalAI] Selector picked ${out.length} tracks`);
  // MPD file paths are appropriate URIs for 'mpd' service via addQueueItems
  return out.map(u => 'music-library/' + u);
}
module.exports = { findTracks };
lib/queue.js
'use strict';
/**
* Use Volumio core commands to keep state machine happy.
*/
/**
 * Replace the current Volumio queue with the given items and start playback.
 * Uses core commandRouter calls so the playback state machine stays
 * consistent.
 * @param {object} commandRouter - Volumio core command router.
 * @param {Array<{service: string, uri: string}>} items - Queue entries.
 */
async function replaceQueueAndPlay(commandRouter, items) {
  // Order matters: clear first, then enqueue, then play.
  await commandRouter.volumioClearQueue();
  await commandRouter.addQueueItems(items);
  await commandRouter.volumioPlay();
}
module.exports = { replaceQueueAndPlay };
lib/artClient.js
'use strict';
const fs = require('fs');
const path = require('path');
const axios = require('axios');
/**
* Save artwork under /data/local-ai-playlists/
* (Volumio will not automatically show playlist art; you can use it in your own UI or notes)
*/
// Artwork output directory on Volumio's persistent /data partition.
const OUT_DIR = '/data/local-ai-playlists';
/**
 * Generate cover art for the prompt via an Automatic1111 txt2img endpoint
 * and save it as a PNG under OUT_DIR.
 *
 * Improvements over the original:
 * - Filesystem work uses fs.promises so the Node event loop is not blocked
 *   while the directory is created or the image is written.
 * - The "WxH" size string is parsed case-insensitively ("1024X1024" works).
 *
 * @param {object} cfg - { baseUrl, size } artwork settings.
 * @param {string} userPrompt - The user's playlist prompt, embedded in the image prompt.
 * @param {object} logger - Volumio logger.
 * @returns {Promise<string>} Absolute path of the written PNG.
 * @throws {Error} When the API returns no image.
 */
async function generate(cfg, userPrompt, logger) {
  // recursive:true makes this a no-op when the directory already exists.
  await fs.promises.mkdir(OUT_DIR, { recursive: true });
  // Parse "WxH"; fall back to 1024x1024 on anything malformed.
  let width = 1024;
  let height = 1024;
  if (typeof cfg.size === 'string' && /x/i.test(cfg.size)) {
    const [w, h] = cfg.size.split(/x/i).map(Number);
    if (w && h) { width = w; height = h; }
  }
  // Automatic1111 txt2img
  const url = cfg.baseUrl.replace(/\/$/, '') + '/sdapi/v1/txt2img';
  const res = await axios.post(url, {
    prompt: `Minimal high-contrast cover art representing: ${userPrompt}`,
    steps: 20,
    width,
    height,
    cfg_scale: 5
  }, { timeout: 120000 });
  const b64 = res?.data?.images?.[0];
  if (!b64) throw new Error('Image generation failed');
  const file = path.join(OUT_DIR, `ai-cover-${Date.now()}.png`);
  await fs.promises.writeFile(file, Buffer.from(b64, 'base64'));
  logger.info('[LocalAI] Artwork written: ' + file);
  return file;
}
module.exports = { generate };
- Open Settings → Plugins → Local AI Playlists:
- Set LLM Mode (Ollama or OpenAI-compatible)
- Set Base URL and Model
- (Optional) Enable Artwork and set Image API URL
Usage
- Go to Plugins → Local AI Playlists → Create Playlist.
- Enter a prompt (e.g. “late-night downtempo with female vocals, 90–110 bpm”).
- Click Generate. The queue is replaced with AI-picked tracks and playback starts.
Logs & Troubleshooting
- Watch logs:
journalctl -fu volumio
- Ensure your library is indexed (MPD lists tracks).
- If no tracks: try more specific genres/artists you own.
- If artwork fails: check your Automatic1111 endpoint.
Uninstall
volumio plugin disable local-ai-playlists
volumio plugin uninstall local-ai-playlists
Notes
- This plugin uses Volumio core commands to manage the queue, keeping the state machine intact.
- Playlist “artwork” is saved under
/data/local-ai-playlists/ for your reference.
---
# 🚀 Deploy & test (step-by-step)
1) **Zip and install**
```bash
cd local-ai-playlists
zip -r ../local-ai-playlists.zip .
# Copy zip to Volumio, then:
ssh volumio@<volumio-ip>
volumio plugin install /home/volumio/local-ai-playlists.zip
volumio plugin enable local-ai-playlists
Configure
In Volumio UI: Plugins → Local AI Playlists → Local LLM
Mode: Ollama (or OpenAI-compatible)
Base URL: e.g. http://[AI STACK IP]:11434 (Ollama)
Model: e.g. llama3:8b-instruct
Save.
(Optional) Artwork section → enable and set http://[AI STACK IP]:7860 → Save.
Generate
Create Playlist section → type your prompt → set Max tracks → Generate.
Volumio will clear the queue, add tracks, and start playing.
Notes / customizations
Better matching: extend lib/selector.js to read more tags (mood, bpm, decade) using listallinfo and filter in JS.
Mixed sources: if you later target Spotify/YouTube, you’ll need to map LLM picks to URIs that those service plugins understand (and set service accordingly).
Security: if your LLM/image endpoints sit on other VLANs, add auth headers in llmClient.js / artClient.js.