Skip to content

Commit

Permalink
store api key
Browse files Browse the repository at this point in the history
  • Loading branch information
aj47 committed Aug 20, 2024
1 parent dfb7254 commit 8fbce5b
Show file tree
Hide file tree
Showing 3 changed files with 119 additions and 76 deletions.
66 changes: 50 additions & 16 deletions clickolas-cage/src/background/background.js
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ const processResponse = async (request, sender, sendResponse) => {
currentPlan: [],
originalPrompt: request.prompt,
isExecuting: true,
stopRequested: false
stopRequested: false,
})
sendMessageToTab(currentState.targetTab, { type: 'execution_started' })
const responseJSON = await promptToFirstStep(
Expand Down Expand Up @@ -266,12 +266,14 @@ const processResponse = async (request, sender, sendResponse) => {
await updateModelAndProvider(request.model, request.provider, request.apiKey)
break
case 'getModelAndProvider':
const apiKey = await getApiKey()
await initializeOpenAI(apiKey, currentState.currentModel, currentState.currentProvider)
sendResponse({
currentModel: currentState.currentModel,
currentProvider: currentState.currentProvider,
currentApiKey: currentState.currentApiKey,
currentApiKey: apiKey,
})
return // Add this line to prevent further execution
return true // Indicate that we're sending a response asynchronously
case 'user_message':
if (!currentState.isExecuting) {
updateState({ isExecuting: true, stopRequested: false })
Expand All @@ -287,17 +289,17 @@ const processResponse = async (request, sender, sendResponse) => {
null, // notFoundElement
currentState.currentModel,
currentState.currentProvider,
request.message // Add the user's message to the LLM input
request.message, // Add the user's message to the LLM input
)
// Check if stop was requested while waiting for LLM response
if (getState().stopRequested) {
console.log('Execution stopped, discarding LLM response')
updateState({ stopRequested: false })
break;
break
}
console.log('Next step from LLM:', JSON.stringify(nextStepWithElements))
await addStepToPlan(nextStepWithElements)
break;
break
case 'stop_execution':
updateState({ isExecuting: false, stopRequested: true })
// Cancel any ongoing tasks or timers here
Expand All @@ -313,27 +315,59 @@ const processResponse = async (request, sender, sendResponse) => {
}
}

// Add these functions for handling secure storage
/**
 * Persist the API key to Chrome's synced extension storage.
 * @param {string} apiKey - The key to store under the `apiKey` slot.
 * @returns {Promise<void>} Resolves when the write completes; rejects if the
 *   write fails (e.g. sync quota exceeded) instead of silently resolving.
 */
const saveApiKey = (apiKey) => {
  return new Promise((resolve, reject) => {
    chrome.storage.sync.set({ apiKey: apiKey }, () => {
      // chrome.storage callbacks signal failure via runtime.lastError; leaving
      // it unchecked both hides the failure and triggers a console warning.
      if (chrome.runtime.lastError) {
        reject(new Error(chrome.runtime.lastError.message))
      } else {
        resolve()
      }
    })
  })
}

/**
 * Read the saved API key from Chrome's synced extension storage.
 * @returns {Promise<string|null>} The stored key, or null when no key has
 *   been saved. Never rejects: a storage error is treated as "no key".
 */
const getApiKey = () => {
  return new Promise((resolve) => {
    chrome.storage.sync.get(['apiKey'], (result) => {
      // On failure Chrome sets runtime.lastError and passes an undefined
      // result; reading result.apiKey would then throw. Resolve null so
      // callers keep their simple "missing key" handling.
      if (chrome.runtime.lastError || !result) {
        resolve(null)
        return
      }
      resolve(result.apiKey || null)
    })
  })
}

/**
 * Update the active model/provider, persist the API key when one is
 * supplied, and re-initialize the OpenAI client with the new settings.
 * @param {string} model - Model identifier (e.g. 'gpt-4o').
 * @param {string} provider - Provider name ('google' | 'openai' | 'groq' | 'custom').
 * @param {string} [apiKey] - Optional key; when falsy, nothing is persisted.
 */
const updateModelAndProvider = async (model, provider, apiKey) => {
  updateState({ currentModel: model, currentProvider: provider })
  if (apiKey) {
    await saveApiKey(apiKey)
    updateState({ currentApiKey: apiKey })
  }
  // Log only whether a key was provided — never the secret itself.
  console.log(apiKey ? 'apiKey updated' : 'apiKey unchanged')
  await initializeOpenAI(apiKey, model, provider)
}

// On service-worker startup, hydrate the in-memory state with any API key
// the user previously saved, so settings survive extension restarts.
getApiKey().then((storedKey) => {
  updateState({ currentApiKey: storedKey })
})

// Open the extension UI in a full tab when the bound keyboard shortcut fires.
chrome.commands.onCommand.addListener((command) => {
  if (command !== 'open-extension') return
  chrome.tabs.create({ url: 'popup.html' })
})

// Route every runtime message through processResponse. Returning true from
// the listener keeps the sendResponse channel open for async work.
chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
  const handled = processResponse(request, sender, sendResponse)
  if (request.type === 'getModelAndProvider') {
    // This message replies via sendResponse inside processResponse itself;
    // only surface failures here — do not send a second response.
    handled.catch((error) => {
      console.error('Error processing response:', error)
    })
  } else {
    // All other messages get a completion/error acknowledgement.
    handled
      .then(() => {
        sendResponse('completed')
      })
      .catch((error) => {
        console.error('Error processing response:', error)
        sendResponse('error')
      })
  }
  return true // Indicate that the response is asynchronous
})

/**
Expand Down
3 changes: 0 additions & 3 deletions clickolas-cage/src/llm-utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,6 @@ export const initializeOpenAI = (apiKey, model, provider) => {
});
};

// Initialize with default values
initializeOpenAI(null, DEFAULT_MODEL, DEFAULT_PROVIDER);

/**
* Wrapper function for OpenAI chat completion calls with logging.
* @param {Object} messages - The messages payload to send to the OpenAI API.
Expand Down
126 changes: 69 additions & 57 deletions clickolas-cage/src/popup/Popup.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,15 @@ const Popup = () => {
const [customModel, setCustomModel] = useState('')
const [showSettings, setShowSettings] = useState(false)
const [apiKey, setApiKey] = useState('')
const [isLoadingSettings, setIsLoadingSettings] = useState(true)

useEffect(() => {
const loadModelAndProvider = async () => {
try {
const response = await sendMessageToBackgroundScript({ type: 'getModelAndProvider' })
setIsLoadingSettings(true)
const response = await new Promise((resolve) => {
chrome.runtime.sendMessage({ type: 'getModelAndProvider' }, resolve);
});
if (response && response.currentModel && response.currentProvider) {
setModel(response.currentModel)
setProvider(response.currentProvider)
Expand All @@ -47,6 +51,8 @@ const Popup = () => {
// Set default values if there's an error
setModel('gemini-1.5-flash-latest')
setProvider('google')
} finally {
setIsLoadingSettings(false)
}
}
loadModelAndProvider()
Expand Down Expand Up @@ -115,64 +121,70 @@ const Popup = () => {
</button>
</div>
{showSettings ? (
<div className="settings-menu">
<div className="model-provider-selector">
<select
value={model}
onChange={handleModelChange}
className="input-common input-small"
>
<optgroup label="Google">
<option value="gemini-1.5-pro">Gemini 1.5 Pro</option>
<option value="gemini-1.5-flash-latest">Gemini 1.5 Flash</option>
</optgroup>
<optgroup label="OpenAI">
<option value="gpt-4-turbo-preview">GPT-4 Turbo</option>
<option value="gpt-4">GPT-4</option>
<option value="gpt-4o">GPT-4o</option>
<option value="gpt-4o-mini">GPT-4o-mini</option>
<option value="gpt-3.5-turbo">GPT-3.5 Turbo</option>
</optgroup>
<optgroup label="Groq">
<option value="llama2-70b-4096">LLaMA2 70B</option>
<option value="mixtral-8x7b-32768">Mixtral 8x7B</option>
</optgroup>
<option value="custom">Custom</option>
</select>
{model === 'custom' && (
<input
type="text"
value={customModel}
onChange={handleCustomModelChange}
placeholder="Enter custom model"
className="input-common input-small custom-model-input"
/>
)}
<select
value={provider}
onChange={handleProviderChange}
isLoadingSettings ? (
<div className="settings-menu">
<p>Loading settings...</p>
</div>
) : (
<div className="settings-menu">
<div className="model-provider-selector">
<select
value={model}
onChange={handleModelChange}
className="input-common input-small"
>
<optgroup label="Google">
<option value="gemini-1.5-pro">Gemini 1.5 Pro</option>
<option value="gemini-1.5-flash-latest">Gemini 1.5 Flash</option>
</optgroup>
<optgroup label="OpenAI">
<option value="gpt-4-turbo-preview">GPT-4 Turbo</option>
<option value="gpt-4">GPT-4</option>
<option value="gpt-4o">GPT-4o</option>
<option value="gpt-4o-mini">GPT-4o-mini</option>
<option value="gpt-3.5-turbo">GPT-3.5 Turbo</option>
</optgroup>
<optgroup label="Groq">
<option value="llama2-70b-4096">LLaMA2 70B</option>
<option value="mixtral-8x7b-32768">Mixtral 8x7B</option>
</optgroup>
<option value="custom">Custom</option>
</select>
{model === 'custom' && (
<input
type="text"
value={customModel}
onChange={handleCustomModelChange}
placeholder="Enter custom model"
className="input-common input-small custom-model-input"
/>
)}
<select
value={provider}
onChange={handleProviderChange}
className="input-common input-small"
>
<option value="google">Google</option>
<option value="openai">OpenAI</option>
<option value="groq">Groq</option>
<option value="custom">Custom</option>
</select>
</div>
<input
type="password"
value={apiKey}
onChange={handleApiKeyChange}
placeholder="Enter API Key"
className="input-common input-small"
>
<option value="google">Google</option>
<option value="openai">OpenAI</option>
<option value="groq">Groq</option>
<option value="custom">Custom</option>
</select>
/>
<button className="input-common input-small" onClick={handleExportLogs}>
Export Logs
</button>
<button className="input-common input-small" onClick={handleClearLogs}>
Clear Logs
</button>
</div>
<input
type="password"
value={apiKey}
onChange={handleApiKeyChange}
placeholder="Enter API Key"
className="input-common input-small"
/>
<button className="input-common input-small" onClick={handleExportLogs}>
Export Logs
</button>
<button className="input-common input-small" onClick={handleClearLogs}>
Clear Logs
</button>
</div>
)
) : !isLoading ? (
<>
<img src={logo} className="App-logo" alt="logo" />
Expand Down

0 comments on commit 8fbce5b

Please sign in to comment.