AiAssistant: Remove LUA plugin

Change-Id: I6e48b5bbbbafd6cc33972b514de5c7518b995a37
Reviewed-by: Marcus Tillmanns <marcus.tillmanns@qt.io>
Author: Mariusz Szczepanik
Date: 2024-09-03 13:12:58 +02:00
Committed by: mua
Parent: b6dd5f93fc
Commit: 46b6f75b40
7 changed files with 0 additions and 588 deletions

@@ -1,129 +0,0 @@
-- Copyright (C) 2024 The Qt Company Ltd.
-- SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
return {
Id = "aiassistant",
Name = "Qt AI Assistant",
Version = "1.0.0",
CompatVersion = "1.0.0",
VendorId = "theqtcompany",
Vendor = "The Qt Company",
Category = "Language Client",
Description = "Qt AI Assistant",
Experimental = true,
DisabledByDefault = true,
LongDescription = [[
Qt AI Assistant is a coding assistant. When connected to a Large Language
Model (LLM), it auto-completes your code and writes test cases and code
documentation.
Qt AI Assistant is available for selected commercial Qt developer
license holders. For more information on licensing, select `Compare`
in [Qt pricing](https://www.qt.io/pricing).
> **Note:** Qt AI Assistant is LLM-agnostic. A subscription to a third-party
LLM service, or a third-party LLM for local or cloud deployment, is not part
of it. You need to connect to a third-party LLM and agree to the terms and
conditions, as well as the acceptable use policy, of the LLM provider. By
using Qt AI Assistant, you agree to
[Terms & Conditions - Qt Development Framework](https://www.qt.io/terms-conditions/qt-dev-framework).
Qt AI Assistant is currently experimental and powered by generative AI. Check
all suggestions to make sure that they are fit for use in your project.
> **Note:** [Install and load](https://doc.qt.io/qtcreator/creator-how-to-load-extensions.html)
the Qt AI Assistant extension to use it.
## Connect to an LLM
You can connect to the following LLMs:
- Meta Llama 3.1 70B (running in a cloud deployment of your choice)
- Anthropic Claude 3.5 Sonnet (provided as a subscription-based service by Anthropic)
To connect to an LLM:
1. Go to `Preferences` > `AI Assistant`.
1. Select the use cases and programming languages to use the LLM for.
1. Enter the authentication token, user name, and API URL of the LLM.
For more information on where to get the access information, see the
third-party LLM provider documentation.
## Automatic code-completion
Qt AI Assistant can help you write code by suggesting what to write next.
When you stop typing, it prompts the LLM to make one or several code
suggestions based on the current cursor position and the code before and
after the cursor. The code suggestions are shown after the cursor in grey.
To accept the entire suggestion, press the `Tab` key.
To accept part of a suggestion, press `Alt+Right`.
To dismiss the suggestion, press `Esc` or navigate to another position in
the code editor.
To interact with Qt AI Assistant using the mouse, hover over the suggestion.
You can then accept the suggested code snippet word by word, or cycle through
alternative suggestions in the code completion bar by selecting the `<` and
`>` buttons.
To close the code completion bar, press the `Esc` key or move the cursor to
another position.
To turn auto-completion of code on or off globally for all projects, go to
`Preferences` > `AI Assistant`. Qt AI Assistant consumes a significant number
of tokens from the LLM. To cut costs, disable the auto-completion feature when
not needed, and use keyboard shortcuts for code completion.
## Complete code from the keyboard
To trigger code suggestions manually, press `Ctrl+Shift+A` (`Cmd+Shift+A` on macOS).
## Chat with the assistant
In an inline chat window in the code editor, you can prompt the assistant to
implement your requests in human language, ask questions, or execute
*smart commands*. To open the chat, press `Ctrl+Shift+D` (`Cmd+Shift+D` on macOS).
To close the chat, press `Esc` or select the `Close` button.
To go to Qt AI Assistant preferences from the chat, select the `Settings` button.
### Request suggestions using human language
To request suggestions using human language, type your requests in the chat.
Qt AI Assistant shows a suggestion that you can copy to the clipboard by
selecting the `Copy` button in the chat.
### Request test cases in Qt Test syntax
To write test cases with Qt AI Assistant:
1. Highlight code in the code editor.
1. Press `Ctrl+Shift+D` (`Cmd+Shift+D` on macOS) to open the chat.
1. Select the `qtest` smart command.
Qt AI Assistant generates a test case in [Qt Test](https://doc.qt.io/qt-6/qttest-index.html)
format that you can copy and paste to your
[Qt test project](https://doc.qt.io/qtcreator/creator-how-to-create-qt-tests.html).
]],
Dependencies = {
{ Id = "lua", Version = "14.0.0" },
{ Id = "lualanguageclient", Version = "14.0.0" }
},
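-- Editor hooks: both callbacks lazily require the plugin's init module and
-- forward the event to it, so the module is only loaded once an editor event
-- (or setup) actually runs.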
hooks = {
editors = {
text = {
contentsChanged = function(document, position, charsRemoved, charsAdded)
require 'init'.Hooks.onDocumentContentsChanged(document, position, charsRemoved, charsAdded)
end,
currentChanged = function(editor)
require 'init'.Hooks.onCurrentChanged(editor)
end,
}
}
},
setup = function()
require 'init'.setup()
end,
} --[[@as QtcPlugin]]

Five binary image files removed (355 B, 605 B, 405 B, 656 B, and 251 B); contents not shown.

@@ -1,459 +0,0 @@
-- Copyright (C) 2024 The Qt Company Ltd.
-- SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0-only WITH Qt-GPL-exception-1.0
local LSP = require('LSP')
local Utils = require('Utils')
local S = require('Settings')
local Gui = require('Gui')
local a = require('async')
local TextEditor = require('TextEditor')
local mm = require('MessageManager')
local fetch = require('Fetch').fetch
local Install = require('Install')
local Action = require("Action")
Hooks = {}
AutoSuggestionDelay = 2000
ServerName = "qtaiassistantserver"
InlineChatActive = false
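-- Extracts editor suggestions from a "getCompletionsCycling" response. The
-- expected response shape is roughly (illustrative example, not a protocol
-- definition):
--   { result = { completions = { {
--       text = "...",
--       range = { start = { line = 0, character = 0 },
--                 ["end"] = { line = 0, character = 4 } } } } } }
-- Missing range fields default to 0.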
local function collectSuggestions(responseTable)
local suggestions = {}
if type(responseTable.result) == "table" and type(responseTable.result.completions) == "table" then
for _, completion in pairs(responseTable.result.completions) do
if type(completion) == "table" then
local text = completion.text or ""
local startLine = completion.range and completion.range.start and completion.range.start.line or 0
local startCharacter = completion.range and completion.range.start and completion.range.start.character or 0
local endLine = completion.range and completion.range["end"] and completion.range["end"].line or 0
local endCharacter = completion.range and completion.range["end"] and completion.range["end"].character or 0
local suggestion = TextEditor.Suggestion.create(startLine, startCharacter, endLine, endCharacter, text)
table.insert(suggestions, suggestion)
end
end
end
return suggestions
end
local function createCommand()
local cmd = { Settings.binary.expandedValue:nativePath() }
return cmd
end
local function createInitOptions()
local llm_config = {
cppLLM = Settings.cppLLM.dataValue,
qmlLLM = Settings.qmlLLM.dataValue,
otherLLM = Settings.otherLLM.dataValue,
debugLLM = Settings.debugLLM.dataValue,
reviewLLM = Settings.reviewLLM.dataValue,
explainLLM = Settings.explainLLM.dataValue
}
local auth_token_config = {
authTokenLama35 = Settings.authTokenLlama3.value,
authTokenClaude35 = Settings.authTokenClaude35.value
}
return {
llm_config = llm_config,
auth_token_config = auth_token_config
}
end
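-- Fetches the latest server release metadata and, if no package is installed
-- or the installed version differs, downloads the platform asset whose name
-- contains "project-build-<os>" and installs it. On success, Settings.binary
-- is pointed at the installed server executable.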
local function installOrUpdateServer()
local data = a.wait(fetch({
url = "https://qtccache.qt.io/QtAIAssistant/LatestRelease",
convertToTable = true
}))
if not data then
print("Failed to fetch release data.")
return
end
local id = string.format("%.0f", data["id"])
print("Found version:", id)
local lspPkgInfo = Install.packageInfo(ServerName)
if not lspPkgInfo or lspPkgInfo.version ~= id then
local osTr = { mac = "macos", windows = "windows", linux = "ubuntu" }
local os = osTr[Utils.HostOsInfo.os]
print("Attempt to download server for: ", Utils.HostOsInfo.os)
local expectedFileName = "project-build-" .. os
local asset = nil
for _, entry in ipairs(data["assets"]) do
if string.find(entry.name, expectedFileName, 1, true) then
asset = entry
break
end
end
if not asset then
print("No assets found for this platform. Expected file base name:", expectedFileName)
return
end
local assetId = string.format("%.0f", asset["id"])
local downloadUrl = "https://qtccache.qt.io/QtAIAssistant/Asset?assetId=" .. assetId
print("Using download URL:", downloadUrl)
local res, err = a.wait(Install.install(
"Do you want to install the " .. ServerName .. "?", {
name = ServerName,
url = downloadUrl,
version = id
}))
if not res then
mm.writeFlashing("Failed to install " .. ServerName .. ": " .. err .. ". Please make sure you have 7z installed for package extraction.")
return
end
lspPkgInfo = Install.packageInfo(ServerName)
print("Installed:", lspPkgInfo.name, " version:", lspPkgInfo.version, " at:", lspPkgInfo.path)
end
local binary = ServerName
if Utils.HostOsInfo.isWindowsHost() then
binary = binary .. ".exe"
end
Settings.binary:setValue(lspPkgInfo.path:resolvePath(binary))
Settings:apply()
end
IsTryingToInstall = false
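-- LSP client configuration: the server from Settings.binary is started over
-- stdio, attaches to all documents ('*.*'), is always on, and is hidden from
-- the generic language client settings page. If the server fails to start, an
-- install or update is attempted (IsTryingToInstall guards against re-entry).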
local function setupClient()
Client = LSP.Client.create({
name = 'AI Assistant Server',
cmd = createCommand,
transport = 'stdio',
initializationOptions = createInitOptions,
languageFilter = {
patterns = { '*.*' },
},
settings = Settings,
startBehavior = "AlwaysOn",
showInSettings = false,
onStartFailed = function()
a.sync(function()
if IsTryingToInstall == true then
return
end
IsTryingToInstall = true
installOrUpdateServer()
IsTryingToInstall = false
end)()
end
})
end
local function using(tbl)
local result = _G
for k, v in pairs(tbl) do result[k] = v end
return result
end
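-- Builds the settings page layout with the Gui DSL; using(Gui) makes the
-- layout items (Form, Row, PushButton, br, st) available through the
-- function's environment (_ENV).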
local function layoutSettings()
local _ENV = using(Gui)
local layout = Form {
Settings.binary, br,
Row {
PushButton {
text = "Try to install AI Assistant Server",
onClicked = function() a.sync(installOrUpdateServer)() end,
br,
},
st
},
br,
Settings.cppLLM, br,
Settings.qmlLLM, br,
Settings.otherLLM, br,
Settings.debugLLM, br,
Settings.reviewLLM, br,
Settings.explainLLM, br,
Settings.authTokenLlama3, br,
Settings.authTokenClaude35
}
return layout
end
local available_llms = {
{ name = "Llama 3 70B Fine-Tuned", data = "Llama3" },
{ name = "Claude 3.5 Sonnet", data = "Claude35" }
}
local function createSelectionAspect(settingsKey, displayName)
return S.SelectionAspect.create({
settingsKey = settingsKey,
options = available_llms,
displayStyle = S.SelectionDisplayStyle.ComboBox,
displayName = displayName
})
end
local function addLLMSetting(keySuffix, displayText)
Settings[keySuffix] = createSelectionAspect("AIAssistant." .. keySuffix, displayText)
end
local function addAuthTokenSetting(llm_name, displayText)
Settings["authToken" .. llm_name] = S.StringAspect.create({
settingsKey = "AIAssistant.AuthToken." .. llm_name,
labelText = displayText .. ":",
displayStyle = S.StringDisplayStyle.LineEdit,
defaultValue = "AUTH_TOKEN",
})
end
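-- The settings container groups a FilePathAspect for the server binary, one
-- ComboBox selection per use case (C++, QML, other, debug, review, explain),
-- and one line-edit StringAspect per authentication token, all stored under
-- "AIAssistant.*" settings keys.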
local function setupAspect()
Settings = S.AspectContainer.create({
autoApply = false,
layouter = layoutSettings,
})
Settings.binary = S.FilePathAspect.create({
settingsKey = "AIAssistant.Binary",
displayName = "Binary",
labelText = "Binary:",
toolTip = "The path to the AI Assistant Server",
expectedKind = S.Kind.ExistingCommand,
defaultPath = Utils.FilePath.fromUserInput("/path/to/server"),
})
addLLMSetting("cppLLM", "LLM for C++:")
addLLMSetting("qmlLLM", "LLM for QML:")
addLLMSetting("otherLLM", "LLM for other languages:")
addLLMSetting("debugLLM", "LLM for debug:")
addLLMSetting("reviewLLM", "LLM for review")
addLLMSetting("explainLLM", "LLM for explain:")
addAuthTokenSetting("Llama3", "Llama 3 API authentication Token")
addAuthTokenSetting("Claude35", "Claude 3.5 API authentication Token")
Options = S.OptionsPage.create({
aspectContainer = Settings,
categoryId = "AIAssistant.OptionsPage",
displayName = tr("AI Assistant"),
id = "AIAssistant.Settings",
displayCategory = "AI Assistant",
categoryIconPath = PluginSpec.pluginDirectory:resolvePath("images/settingscategory_ai_assistant.png")
})
return Settings
end
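-- Builds the custom "getCompletionsCycling" JSON-RPC request from the current
-- editor state; illustrative shape (values are examples only):
--   { jsonrpc = "2.0", method = "getCompletionsCycling",
--     params = { doc = { position = { line = 42, character = 7 },
--                        uri = "file:///path/to/file.cpp", version = 3 } } }
-- Returns nil if there is no editor, document version, or server URI.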
local function buildRequest()
local editor = TextEditor.currentEditor()
if editor == nil then
print("No editor found")
return
end
local document = editor:document()
local filePath = document:file()
local ok, doc_version = Client:documentVersion(filePath)
if not ok then
print("No document version found:", doc_version)
return
end
local ok, doc_uri = Client:hostPathToServerUri(filePath)
if not ok then
print("No document uri found", doc_uri)
return
end
local main_cursor = editor:cursor():mainCursor()
local block = main_cursor:blockNumber()
local column = main_cursor:columnNumber()
local request_msg = {
jsonrpc = "2.0",
method = "getCompletionsCycling",
params = {
doc = {
position = {
character = column,
line = block
},
uri = doc_uri,
version = doc_version
}
}
}
return request_msg
end
local function completionResponseCallback(response)
print("completionResponseCallback() called")
local editor = TextEditor.currentEditor()
if editor == nil then
print("No editor found")
return
end
local suggestions = collectSuggestions(response)
if next(suggestions) == nil then
print("No suggestions found")
return
end
local document = editor:document()
document:setSuggestions(suggestions)
end
local function sendRequest(request)
print("sendRequest() called")
if Client == nil then
print("No client found")
return
end
local editor = TextEditor.currentEditor()
if editor == nil or editor == "" then
print("No editor found")
return
end
local document = editor:document()
local result = a.wait(Client:sendMessageWithIdForDocument(document, request))
completionResponseCallback(result)
end
local function requestSuggestions()
local main_cursor = TextEditor.currentEditor():cursor():mainCursor()
if main_cursor == nil then
print("No cursor found")
return
end
if main_cursor:hasSelection() then
print("Ignoring requestSuggestions() due to cursor selection")
return
end
local request_msg = buildRequest()
if request_msg == nil then
print("requestSuggestions() failed to build request message")
return
end
sendRequest(request_msg)
end
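-- Opens the inline chat as a floating widget anchored at the main cursor
-- position. InlineChatActive ensures a single chat instance; the close button
-- resets the flag and the settings button opens the AI Assistant options page.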
local function activateInlineChat()
print("activateInlineChat() called")
if InlineChatActive == true then
print("Inline chat is already active")
return
end
InlineChatActive = true
local _ENV = using(Gui)
local closeButton = PushButton {
iconPath = PluginSpec.pluginDirectory:resolvePath("images/inlinechat_close_lua.png"),
flat = true,
onClicked = function()
ChatWidget:close()
InlineChatActive = false
end
}
local settingsButton = PushButton {
iconPath = PluginSpec.pluginDirectory:resolvePath("images/inlinechat_settings_lua.png"),
flat = true,
onClicked = function()
Options:show()
end
}
local chatInput = S.StringAspect.create({
displayStyle = S.StringDisplayStyle.TextEdit,
placeHolderText = "Search",
macroExpander = Null
})
ChatWidget = Widget {
size = {538, 68},
autoFillBackground = true,
Column {
Row {
chatInput, settingsButton, closeButton,
},
Label {
text = "Write something and you'll see results here.",
},
}
}
local editor = TextEditor.currentEditor()
local main_cursor = editor:cursor():mainCursor()
local pos = main_cursor:position()
editor:addFloatingWidget(ChatWidget, pos)
end
local function requestSuggestionsSafe()
local suggestion = TextEditor.currentSuggestion()
if suggestion ~= nil then
if suggestion:isLocked() then
print("Ignoring suggestion due to locked suggestion")
return
end
end
local ok, err = pcall(requestSuggestions)
if not ok then
print("echo Error fetching: " .. err)
end
end
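-- Debounced auto-suggestions: every document edit restarts this timer (see
-- Hooks.onDocumentContentsChanged below), so a completion request is only
-- sent after typing has paused for the AutoSuggestionDelay interval;
-- switching editors stops it.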
AutoSuggestionTimer = Utils.Timer.create(AutoSuggestionDelay, true,
function() a.sync(requestSuggestionsSafe)() end)
function Hooks.onDocumentContentsChanged(document, position, charsRemoved, charsAdded)
print("onDocumentContentsChanged() called, position, charsRemoved, charsAdded:", position, charsRemoved, charsAdded)
AutoSuggestionTimer:start()
end
function Hooks.onCurrentChanged(editor)
print("onCurrentChanged() called")
AutoSuggestionTimer:stop()
end
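-- Plugin entry point: sets up the settings aspects and the LSP client, and
-- registers two actions with default key sequences, one for requesting
-- suggestions manually and one for opening the inline chat.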
local function setup(parameters)
setupAspect()
setupClient()
Action.create("Trigger.suggestions", {
text = "Trigger AI suggestions",
onTrigger = function() a.sync(requestSuggestionsSafe)() end,
defaultKeySequences = { "Meta+Shift+Alt+A", "Ctrl+Shift+Alt+A" },
})
Action.create("Trigger.inlineChat", {
text = "Trigger AI Inline Chat",
onTrigger = activateInlineChat,
defaultKeySequences = { "Meta+Shift+A", "Ctrl+Shift+A" },
})
end
return {
setup = function() a.sync(setup)() end,
Hooks = Hooks,
}