Skip to content

Commit

Permalink
Merge pull request #39 from neo4j-labs/feature/chatbot-enhancements
Browse files Browse the repository at this point in the history
Feature/chatbot enhancements
  • Loading branch information
msenechal authored Jan 8, 2025
2 parents 4674aa0 + 6fe4b4c commit 2122d93
Show file tree
Hide file tree
Showing 6 changed files with 5,719 additions and 6 deletions.
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
"dependencies": {
"@neo4j-ndl/base": "^3.0.14",
"@neo4j-ndl/react": "^3.0.24",
"@neo4j-nvl/react": "^0.3.6",
"@tanstack/react-table": "^8.9.3",
"autoprefixer": "^10.4.17",
"eslint-plugin-react": "^7.33.2",
Expand Down
Binary file added src/templates/shared/assets/retrieval.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
70 changes: 64 additions & 6 deletions src/templates/shared/components/Chatbot.tsx
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
/* eslint-disable no-confusing-arrow */
import { useEffect, useRef, useState } from 'react';
import { Button, Widget, Typography, Avatar, TextInput } from '@neo4j-ndl/react';
import { Button, Widget, Typography, Avatar, TextInput, IconButton, useCopyToClipboard, Modal } from '@neo4j-ndl/react';

import ChatBotAvatar from '../assets/chatbot-ai.png';
import { ArrowPathIconOutline, ClipboardDocumentIconOutline, HandThumbDownIconOutline, InformationCircleIconOutline, SpeakerWaveIconOutline } from '@neo4j-ndl/react/icons';
import RetrievalInformation from './RetrievalInformation';

type ChatbotProps = {
messages: {
Expand All @@ -11,13 +13,27 @@ type ChatbotProps = {
message: string;
datetime: string;
isTyping?: boolean;
src?: Array<string>;
}[];
};

type ChatbotResponse = {
response: string;
src: string[];
};

export default function Chatbot(props: ChatbotProps) {
const { messages } = props;
const [listMessages, setListMessages] = useState(messages);
const [inputMessage, setInputMessage] = useState('');
const [value, copy] = useCopyToClipboard();
const [isOpenModal, setIsOpenModal] = useState<boolean>(false);
const [timeTaken, setTimeTaken] = useState<number>(0);
const [sourcesModal, setSourcesModal] = useState<string[]>([]);
const [modelModal, setModelModal] = useState<string>('');

const handleCloseModal = () => setIsOpenModal(false);

const formattedTextStyle = { color: 'rgb(var(--theme-palette-discovery-bg-strong))' };

const messagesEndRef = useRef<HTMLDivElement>(null);
Expand All @@ -26,17 +42,18 @@ export default function Chatbot(props: ChatbotProps) {
setInputMessage(e.target.value);
};

const simulateTypingEffect = (responseText: string, index = 0) => {
if (index < responseText.length) {
const simulateTypingEffect = (responseText: ChatbotResponse, index = 0) => {
const msg = responseText.response;
if (index < msg.length) {
const nextIndex = index + 1;
const currentTypedText = responseText.substring(0, nextIndex);
const currentTypedText = msg.substring(0, nextIndex);

if (index === 0) {
const date = new Date();
const datetime = `${date.toLocaleDateString()} ${date.toLocaleTimeString()}`;
setListMessages((msgs) => [
...msgs,
{ id: Date.now(), user: 'chatbot', message: currentTypedText, datetime: datetime, isTyping: true },
{ id: Date.now(), user: 'chatbot', message: currentTypedText, datetime: datetime, isTyping: true, src: responseText.src },
]);
} else {
setListMessages((msgs) => msgs.map((msg) => (msg.isTyping ? { ...msg, message: currentTypedText } : msg)));
Expand All @@ -59,7 +76,7 @@ export default function Chatbot(props: ChatbotProps) {
setListMessages((listMessages) => [...listMessages, userMessage]);
setInputMessage('');

const chatbotReply = 'Hello, how can I help you today?'; // Replace with getting a response from your chatbot through your APIs
const chatbotReply = {response: 'Hello, here is an example response with sources. To use the chatbot, plug this to your backend with a fetch containing an object response of type: {response: string, src: Array<string>}', src: ["1:1234-abcd-efgh-ijkl-5678:2", "3:8765-zyxw-vuts-rqpo-4321:4"]}; // Replace with getting a response from your chatbot through your APIs
simulateTypingEffect(chatbotReply);
};

Expand Down Expand Up @@ -127,6 +144,39 @@ export default function Chatbot(props: ChatbotProps) {
<div className='text-right align-bottom pt-3'>
<Typography variant='body-small'>{chat.datetime}</Typography>
</div>
<Typography variant='body-small' className='text-right'>
{chat.user === 'chatbot' ? (
<div className='flex gap-1'>
<>
<IconButton isClean ariaLabel="Search Icon">
<SpeakerWaveIconOutline className="w-4 h-4 inline-block"/>
</IconButton>
{chat.src ? (
<IconButton isClean ariaLabel="Search Icon"
onClick={() => {
setModelModal('OpenAI GPT 4o');
setSourcesModal(chat.src ?? []);
setTimeTaken(50);
setIsOpenModal(true)
}}>
<InformationCircleIconOutline className='w-4 h-4 inline-block' />
</IconButton>
) : <></>}
<IconButton isClean ariaLabel="Search Icon" onClick={() => copy(chat.message)} >
<ClipboardDocumentIconOutline className='w-4 h-4 inline-block' />
</IconButton>
<IconButton isClean ariaLabel="Search Icon">
<ArrowPathIconOutline className='w-4 h-4 inline-block' />
</IconButton>
<IconButton isClean ariaLabel="Search Icon">
<HandThumbDownIconOutline className='w-4 h-4 inline-block n-text-palette-danger-text' />
</IconButton>
</>
</div>
) : (
<></>
)}
</Typography>
</Widget>
</div>
))}
Expand All @@ -142,11 +192,19 @@ export default function Chatbot(props: ChatbotProps) {
onChange={handleInputChange}
htmlAttributes={{
type: 'text',
"aria-label": "Chatbot Input",
}}
/>
<Button type='submit'>Submit</Button>
</form>
</div>

<Modal modalProps={{
id: 'default-menu',
className: 'n-p-token-4 n-bg-palette-neutral-bg-weak n-rounded-lg min-w-[60%]',
}} onClose={handleCloseModal} isOpen={isOpenModal}>
<RetrievalInformation sources={sourcesModal} model={modelModal} timeTaken={timeTaken} />
</Modal>
</div>
);
}
122 changes: 122 additions & 0 deletions src/templates/shared/components/RetrievalInformation.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import { useEffect, useRef, useState } from 'react';

import { Box, Flex, IconButton, Typography } from '@neo4j-ndl/react';
import { ClockIconOutline, FitToScreenIcon, ResetZoomIcon } from '@neo4j-ndl/react/icons';
import retrievalIllustration from '../assets/retrieval.png';

import type { NVL, HitTargets, Node, Relationship } from '@neo4j-nvl/base'
import { InteractiveNvlWrapper } from '@neo4j-nvl/react'
import type { MouseEventCallbacks } from '@neo4j-nvl/react'
import { runRAGQuery, setDriver } from '../utils/Driver';

type RetrievalProps = {
sources: Array<string>;
model: string;
timeTaken: number;
};

/**
 * Modal body showing how a chatbot answer was produced: the model used, the
 * time taken, and an interactive graph (NVL) of the retrieved source nodes.
 *
 * The graph rendered here is mockup data built from the source ids; swap in
 * runRAGQuery (see utils/Driver) to display the real retrieval subgraph.
 */
function RetrievalInformation(props: RetrievalProps) {
  const nvl = useRef<NVL | null>(null)

  const [nodes, setNodes] = useState<Node[]>([])
  const [rels, setRels] = useState<Relationship[]>([])

  // All NVL mouse interactions are deliberate no-ops; override the ones you need.
  const mouseEventCallbacks: MouseEventCallbacks = {
    onHover: (_element: Node | Relationship, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onRelationshipRightClick: (_rel: Relationship, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onNodeClick: (_node: Node, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onNodeRightClick: (_node: Node, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onNodeDoubleClick: (_node: Node, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onRelationshipClick: (_rel: Relationship, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onRelationshipDoubleClick: (_rel: Relationship, _hitTargets: HitTargets, _evt: MouseEvent) => null,
    onCanvasClick: (_evt: MouseEvent) => null,
    onCanvasDoubleClick: (_evt: MouseEvent) => null,
    onCanvasRightClick: (_evt: MouseEvent) => null,
    onDrag: (_nodes: Node[]) => null,
    onPan: (_panning: { x: number; y: number }, _evt: MouseEvent) => null,
    onZoom: (_zoomLevel: number) => null
  }

  // Zoom/pan the viewport so every current node is visible.
  const fitNodes = () => {
    nvl.current?.fit(nodes.map((n) => n.id))
  }
  const resetZoom = () => {
    nvl.current?.resetZoom()
  }

  function retrieveSources() {
    // This is only for rendering the sources nodes. Ideally, for each of the sources, you would use your retrieval query to get the nodes and relationships
    // Example:
    // setDriver('bolt://localhost:7687', 'neo4j', 'password');
    // runRAGQuery(props.sources).then((nvlGraph) => {
    //   setNodes(nvlGraph.nodes);
    //   setRels(nvlGraph.relationships);
    // });
    const retrievedNodes = props.sources.map((source, index) => ({
      id: `${index}`,
      color: '#0A6190',
      captions: [{ value: source }]
    }));
    setNodes(retrievedNodes);
    // Only add the mockup relationship when both of its endpoints ('0' and '1')
    // actually exist; with fewer than two sources it would dangle on a missing node.
    setRels(
      retrievedNodes.length >= 2
        ? [{ id: '10', from: '0', to: '1', captions: [{ value: 'MOCKUP_DATA' }] }]
        : []
    );
  }

  // Build the mockup graph once when the modal content mounts.
  useEffect(() => {
    retrieveSources();
  }, []);

  return (
    <Box className="n-bg-palette-neutral-bg-weak p-4">
      <Flex flexDirection='row' className='flex flex-row p-6 items-center'>
        <img src={retrievalIllustration} alt="icon" style={{ width: 95, height: 95, marginRight: 10 }} />
        <Box className='flex flex-col'>
          <Typography variant="h2">Retrieval information</Typography>
          <Typography className="mb-2" variant="body-medium">
            To generate this response, we used the model <span className='font-bold italic'>{props.model}</span>.
            <Typography className="pl-1 italic" variant="body-small"><ClockIconOutline className="w-4 h-4 inline-block mb-1" /> {props.timeTaken / 1000} seconds</Typography>
          </Typography>
        </Box>
      </Flex>
      <Box className="button-container flex justify-between mt-2">
        <div
          style={{
            margin: 10,
            borderRadius: 25,
            border: '2px solid #2AADA5',
            height: 800,
            background: `rgb(var(--theme-palette-primary-bg-weaker))`,
            boxShadow: `2px -2px 10px grey`,
            position: 'relative',
          }}>
          {/* Overlay controls kept above the canvas via zIndex. */}
          <Flex flexDirection='row' className='flex flex-row p-6' style={{
            position: 'absolute',
            top: 0,
            right: 0,
            zIndex: 1000
          }}>
            <IconButton className="n-size-token-7" ariaLabel="Fit to screen" onClick={fitNodes}>
              <FitToScreenIcon />
            </IconButton>
            <IconButton className="n-size-token-7" ariaLabel="Reset zoom" onClick={resetZoom}>
              <ResetZoomIcon />
            </IconButton>
          </Flex>
          <InteractiveNvlWrapper
            ref={nvl}
            nodes={nodes}
            rels={rels}
            mouseEventCallbacks={mouseEventCallbacks}
            nvlOptions={{
              initialZoom: 2,
              layout: 'd3Force',
              relationshipThreshold: 1,
            }}
          />
        </div>
      </Box>
    </Box>
  );
}

export default RetrievalInformation;
28 changes: 28 additions & 0 deletions src/templates/shared/utils/Driver.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
/* eslint-disable no-console */
import neo4j, { Driver } from 'neo4j-driver';
import { nvlResultTransformer } from "@neo4j-nvl/base";

export let driver: Driver;

Expand Down Expand Up @@ -28,6 +29,33 @@ export async function disconnect() {
}
}

/**
 * Fetches the 1-hop neighborhood of the given source nodes so it can be
 * rendered with NVL.
 *
 * @param sources element ids of the retrieved source nodes (as produced by
 *                the RAG pipeline, e.g. "1:1234-abcd-...:2").
 * @returns nodes and relationships in NVL shape, each with a display caption.
 */
export async function runRAGQuery(sources: Array<string>) {
  // Customize the RETRIEVAL_QUERY to match your needs.
  // The ids are passed as a Cypher parameter ($sources) instead of being
  // interpolated into the query string — this avoids quoting bugs and
  // injection via crafted source ids, and lets the server cache the plan.
  const RETRIEVAL_QUERY = 'MATCH (a)-[r]->(b) WHERE elementId(a) IN $sources RETURN a, r, b LIMIT 25';
  const nvlGraph = await driver.executeQuery(
    RETRIEVAL_QUERY,
    { sources },
    { resultTransformer: nvlResultTransformer }
  );
  // Caption each node with its 'name' property when present, otherwise its
  // first label.
  const nodes = nvlGraph.nodes.map((node) => {
    const { properties, labels } = nvlGraph.recordObjectMap.get(node.id);
    return {
      ...node,
      caption: properties.name ?? labels[0],
    };
  });
  // Caption each relationship with its type (e.g. ACTED_IN).
  const relationships = nvlGraph.relationships.map((rel) => {
    const record = nvlGraph.recordObjectMap.get(rel.id);
    return {
      ...rel,
      caption: record.type,
    };
  });
  return { nodes, relationships };
}

/*
Everything below this line is only for providing examples based on datasets available in Neo4j Sandbox (sandbox.neo4j.com).
When using this code in your own project, you should remove the examples below and use your own queries.
Expand Down
Loading

0 comments on commit 2122d93

Please sign in to comment.